From 2e5209f60307bb8d120a75dd83cb94ff0ada8c54 Mon Sep 17 00:00:00 2001 From: Gang Li Date: Wed, 9 Feb 2022 16:19:59 +0800 Subject: [PATCH 01/23] Chore: Fixed the dep versions To avoid some api changes which could cause errors --- requirements.txt | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/requirements.txt b/requirements.txt index 3ec25922..d35d6b0d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,9 +1,9 @@ -setuptools-rust -jinja2 -boto3 -boto3_type_annotations -botocore -click -requests -ruamel.yaml -defusedxml +setuptools-rust==1.1.2 +jinja2==3.0.3 +boto3==1.20.45 +botocore==1.23.45 +boto3_type_annotations==0.3.1 +click==8.0.3 +requests==2.27.1 +ruamel.yaml==0.17.20 +defusedxml==0.7.1 From 49bd922d1db99ee152e3c1428317aa07a52e21eb Mon Sep 17 00:00:00 2001 From: Gang Li Date: Fri, 28 Jan 2022 12:42:18 +0800 Subject: [PATCH 02/23] Merge pull request #128 from ligangty/main Fix: tests failure due to new boto3 release --- tests/base.py | 2 ++ tests/requirements.txt | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/base.py b/tests/base.py index 88eeeba8..011a5b0f 100644 --- a/tests/base.py +++ b/tests/base.py @@ -26,6 +26,7 @@ from charon.storage import PRODUCT_META_KEY, CHECKSUM_META_KEY from tests.commons import TEST_BUCKET from boto3_type_annotations import s3 +from moto import mock_s3 SHORT_TEST_PREFIX = "ga" @@ -82,6 +83,7 @@ def get_config_base(self) -> str: return os.path.join(self.get_temp_dir(), '.charon') +@mock_s3 class PackageBaseTest(BaseTest): def setUp(self): super().setUp() diff --git a/tests/requirements.txt b/tests/requirements.txt index 55d4ddd3..de68f7b1 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -5,5 +5,5 @@ pytest-cov pytest-html flake8 requests-mock -moto +moto==3.0.2.dev12 From a3c66da01ea88d79f3df5f290294328fe8d18215 Mon Sep 17 00:00:00 2001 From: Gang Li Date: Tue, 8 Feb 2022 22:44:03 +0800 Subject: [PATCH 03/23] Merge pull request #130 from 
ligangty/main Chore: Update year in license header --- charon/__init__.py | 2 +- charon/cmd/__init__.py | 2 +- charon/cmd/command.py | 2 +- charon/config.py | 2 +- charon/constants.py | 2 +- charon/pkgs/__init__.py | 2 +- charon/pkgs/indexing.py | 2 +- charon/pkgs/maven.py | 2 +- charon/pkgs/npm.py | 2 +- charon/storage.py | 2 +- charon/utils/__init__.py | 2 +- charon/utils/archive.py | 2 +- charon/utils/files.py | 2 +- charon/utils/logs.py | 2 +- setup.py | 2 +- test.sh | 3 +++ tests/base.py | 2 +- tests/test_config.py | 2 +- tests/test_indexing.py | 2 +- tests/test_maven_del.py | 2 +- tests/test_maven_index.py | 2 +- tests/test_maven_meta.py | 2 +- tests/test_maven_upload.py | 2 +- tests/test_npm_del.py | 2 +- tests/test_npm_index.py | 2 +- tests/test_npm_meta.py | 2 +- tests/test_npm_upload.py | 2 +- tests/test_pkgs_dryrun.py | 2 +- tests/test_s3client.py | 2 +- tests/test_util.py | 2 +- 30 files changed, 32 insertions(+), 29 deletions(-) diff --git a/charon/__init__.py b/charon/__init__.py index eb8203f5..f0a1e4d2 100644 --- a/charon/__init__.py +++ b/charon/__init__.py @@ -1,5 +1,5 @@ """ -Copyright (C) 2021 Red Hat, Inc. (https://github.com/Commonjava/charon) +Copyright (C) 2022 Red Hat, Inc. (https://github.com/Commonjava/charon) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/charon/cmd/__init__.py b/charon/cmd/__init__.py index dbff095e..9eefcae0 100644 --- a/charon/cmd/__init__.py +++ b/charon/cmd/__init__.py @@ -1,5 +1,5 @@ """ -Copyright (C) 2021 Red Hat, Inc. (https://github.com/Commonjava/charon) +Copyright (C) 2022 Red Hat, Inc. (https://github.com/Commonjava/charon) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
diff --git a/charon/cmd/command.py b/charon/cmd/command.py index fbc26bd3..6d2f556a 100644 --- a/charon/cmd/command.py +++ b/charon/cmd/command.py @@ -1,5 +1,5 @@ """ -Copyright (C) 2021 Red Hat, Inc. (https://github.com/Commonjava/charon) +Copyright (C) 2022 Red Hat, Inc. (https://github.com/Commonjava/charon) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/charon/config.py b/charon/config.py index 3e464fb8..00b1398d 100644 --- a/charon/config.py +++ b/charon/config.py @@ -1,5 +1,5 @@ """ -Copyright (C) 2021 Red Hat, Inc. (https://github.com/Commonjava/charon) +Copyright (C) 2022 Red Hat, Inc. (https://github.com/Commonjava/charon) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/charon/constants.py b/charon/constants.py index 242030f3..d38c32bc 100644 --- a/charon/constants.py +++ b/charon/constants.py @@ -1,5 +1,5 @@ """ -Copyright (C) 2021 Red Hat, Inc. (https://github.com/Commonjava/charon) +Copyright (C) 2022 Red Hat, Inc. (https://github.com/Commonjava/charon) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/charon/pkgs/__init__.py b/charon/pkgs/__init__.py index dbff095e..9eefcae0 100644 --- a/charon/pkgs/__init__.py +++ b/charon/pkgs/__init__.py @@ -1,5 +1,5 @@ """ -Copyright (C) 2021 Red Hat, Inc. (https://github.com/Commonjava/charon) +Copyright (C) 2022 Red Hat, Inc. (https://github.com/Commonjava/charon) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/charon/pkgs/indexing.py b/charon/pkgs/indexing.py index bcb5d1f5..d29914f8 100644 --- a/charon/pkgs/indexing.py +++ b/charon/pkgs/indexing.py @@ -1,5 +1,5 @@ """ -Copyright (C) 2021 Red Hat, Inc. 
(https://github.com/Commonjava/charon) +Copyright (C) 2022 Red Hat, Inc. (https://github.com/Commonjava/charon) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/charon/pkgs/maven.py b/charon/pkgs/maven.py index 77b95f52..80760894 100644 --- a/charon/pkgs/maven.py +++ b/charon/pkgs/maven.py @@ -1,5 +1,5 @@ """ -Copyright (C) 2021 Red Hat, Inc. (https://github.com/Commonjava/charon) +Copyright (C) 2022 Red Hat, Inc. (https://github.com/Commonjava/charon) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/charon/pkgs/npm.py b/charon/pkgs/npm.py index 52477e57..53a226d8 100644 --- a/charon/pkgs/npm.py +++ b/charon/pkgs/npm.py @@ -1,5 +1,5 @@ """ -Copyright (C) 2021 Red Hat, Inc. (https://github.com/Commonjava/charon) +Copyright (C) 2022 Red Hat, Inc. (https://github.com/Commonjava/charon) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/charon/storage.py b/charon/storage.py index 99a46d9e..994363cb 100644 --- a/charon/storage.py +++ b/charon/storage.py @@ -1,5 +1,5 @@ """ -Copyright (C) 2021 Red Hat, Inc. (https://github.com/Commonjava/charon) +Copyright (C) 2022 Red Hat, Inc. (https://github.com/Commonjava/charon) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/charon/utils/__init__.py b/charon/utils/__init__.py index dbff095e..9eefcae0 100644 --- a/charon/utils/__init__.py +++ b/charon/utils/__init__.py @@ -1,5 +1,5 @@ """ -Copyright (C) 2021 Red Hat, Inc. (https://github.com/Commonjava/charon) +Copyright (C) 2022 Red Hat, Inc. (https://github.com/Commonjava/charon) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
diff --git a/charon/utils/archive.py b/charon/utils/archive.py index 3dbd2081..ae6f6f3c 100644 --- a/charon/utils/archive.py +++ b/charon/utils/archive.py @@ -1,5 +1,5 @@ """ -Copyright (C) 2021 Red Hat, Inc. (https://github.com/Commonjava/charon) +Copyright (C) 2022 Red Hat, Inc. (https://github.com/Commonjava/charon) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/charon/utils/files.py b/charon/utils/files.py index 22559ab4..d2889b79 100644 --- a/charon/utils/files.py +++ b/charon/utils/files.py @@ -1,5 +1,5 @@ """ -Copyright (C) 2021 Red Hat, Inc. (https://github.com/Commonjava/charon) +Copyright (C) 2022 Red Hat, Inc. (https://github.com/Commonjava/charon) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/charon/utils/logs.py b/charon/utils/logs.py index 7b0f43c4..582a549d 100644 --- a/charon/utils/logs.py +++ b/charon/utils/logs.py @@ -1,5 +1,5 @@ """ -Copyright (C) 2021 Red Hat, Inc. (https://github.com/Commonjava/charon) +Copyright (C) 2022 Red Hat, Inc. (https://github.com/Commonjava/charon) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/setup.py b/setup.py index 6d10dbd2..5178e079 100755 --- a/setup.py +++ b/setup.py @@ -1,5 +1,5 @@ """ -Copyright (C) 2021 Red Hat, Inc. (https://github.com/Commonjava/charon) +Copyright (C) 2022 Red Hat, Inc. (https://github.com/Commonjava/charon) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
diff --git a/test.sh b/test.sh index bfcff1dc..aa57037d 100755 --- a/test.sh +++ b/test.sh @@ -43,6 +43,9 @@ function setup_charon() { if [[ $OS == "centos" ]]; then # Don't let builddep enable *-source repos since they give 404 errors $RUN rm -f /etc/yum.repos.d/CentOS-Sources.repo + # Resolve "Failed to download metadata for repo: Cannot prepare internal mirrorlist: No URLs in mirrorlist" problem + $RUN sed -i 's|#baseurl=http://mirror.centos.org|baseurl=http://vault.centos.org|g' /etc/yum.repos.d/CentOS-Linux-AppStream.repo + $RUN sed -i 's|#baseurl=http://mirror.centos.org|baseurl=http://vault.centos.org|g' /etc/yum.repos.d/CentOS-Linux-BaseOS.repo # This has to run *before* we try installing anything from EPEL $RUN $PKG $ENABLE_REPO install -y epel-release fi diff --git a/tests/base.py b/tests/base.py index 011a5b0f..2249d4e6 100644 --- a/tests/base.py +++ b/tests/base.py @@ -1,5 +1,5 @@ """ -Copyright (C) 2021 Red Hat, Inc. (https://github.com/Commonjava/charon) +Copyright (C) 2022 Red Hat, Inc. (https://github.com/Commonjava/charon) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/tests/test_config.py b/tests/test_config.py index a8aeceec..7014ed5d 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -1,5 +1,5 @@ """ -Copyright (C) 2021 Red Hat, Inc. (https://github.com/Commonjava/charon) +Copyright (C) 2022 Red Hat, Inc. (https://github.com/Commonjava/charon) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/tests/test_indexing.py b/tests/test_indexing.py index 1d03979a..3fb17545 100644 --- a/tests/test_indexing.py +++ b/tests/test_indexing.py @@ -1,5 +1,5 @@ """ -Copyright (C) 2021 Red Hat, Inc. (https://github.com/Commonjava/charon) +Copyright (C) 2022 Red Hat, Inc. 
(https://github.com/Commonjava/charon) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/tests/test_maven_del.py b/tests/test_maven_del.py index 79ea0bcf..d5aa221a 100644 --- a/tests/test_maven_del.py +++ b/tests/test_maven_del.py @@ -1,5 +1,5 @@ """ -Copyright (C) 2021 Red Hat, Inc. (https://github.com/Commonjava/charon) +Copyright (C) 2022 Red Hat, Inc. (https://github.com/Commonjava/charon) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/tests/test_maven_index.py b/tests/test_maven_index.py index 74402dbe..af50ea3b 100644 --- a/tests/test_maven_index.py +++ b/tests/test_maven_index.py @@ -1,5 +1,5 @@ """ -Copyright (C) 2021 Red Hat, Inc. (https://github.com/Commonjava/charon) +Copyright (C) 2022 Red Hat, Inc. (https://github.com/Commonjava/charon) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/tests/test_maven_meta.py b/tests/test_maven_meta.py index 21d8e409..7c86c6c4 100644 --- a/tests/test_maven_meta.py +++ b/tests/test_maven_meta.py @@ -1,5 +1,5 @@ """ -Copyright (C) 2021 Red Hat, Inc. (https://github.com/Commonjava/charon) +Copyright (C) 2022 Red Hat, Inc. (https://github.com/Commonjava/charon) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/tests/test_maven_upload.py b/tests/test_maven_upload.py index c1bcb822..fbf34500 100644 --- a/tests/test_maven_upload.py +++ b/tests/test_maven_upload.py @@ -1,5 +1,5 @@ """ -Copyright (C) 2021 Red Hat, Inc. (https://github.com/Commonjava/charon) +Copyright (C) 2022 Red Hat, Inc. (https://github.com/Commonjava/charon) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
diff --git a/tests/test_npm_del.py b/tests/test_npm_del.py index 67a8ec7c..34dcf230 100644 --- a/tests/test_npm_del.py +++ b/tests/test_npm_del.py @@ -1,5 +1,5 @@ """ -Copyright (C) 2021 Red Hat, Inc. (https://github.com/Commonjava/charon) +Copyright (C) 2022 Red Hat, Inc. (https://github.com/Commonjava/charon) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/tests/test_npm_index.py b/tests/test_npm_index.py index 9f5e29cb..944126d4 100644 --- a/tests/test_npm_index.py +++ b/tests/test_npm_index.py @@ -1,5 +1,5 @@ """ -Copyright (C) 2021 Red Hat, Inc. (https://github.com/Commonjava/charon) +Copyright (C) 2022 Red Hat, Inc. (https://github.com/Commonjava/charon) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/tests/test_npm_meta.py b/tests/test_npm_meta.py index 94de8746..5cff2321 100644 --- a/tests/test_npm_meta.py +++ b/tests/test_npm_meta.py @@ -1,5 +1,5 @@ """ -Copyright (C) 2021 Red Hat, Inc. (https://github.com/Commonjava/charon) +Copyright (C) 2022 Red Hat, Inc. (https://github.com/Commonjava/charon) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/tests/test_npm_upload.py b/tests/test_npm_upload.py index 47672fc2..10b8f45b 100644 --- a/tests/test_npm_upload.py +++ b/tests/test_npm_upload.py @@ -1,5 +1,5 @@ """ -Copyright (C) 2021 Red Hat, Inc. (https://github.com/Commonjava/charon) +Copyright (C) 2022 Red Hat, Inc. (https://github.com/Commonjava/charon) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
diff --git a/tests/test_pkgs_dryrun.py b/tests/test_pkgs_dryrun.py index c474fda7..1fd5abe5 100644 --- a/tests/test_pkgs_dryrun.py +++ b/tests/test_pkgs_dryrun.py @@ -1,5 +1,5 @@ """ -Copyright (C) 2021 Red Hat, Inc. (https://github.com/Commonjava/charon) +Copyright (C) 2022 Red Hat, Inc. (https://github.com/Commonjava/charon) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/tests/test_s3client.py b/tests/test_s3client.py index d086c46b..2a7dad80 100644 --- a/tests/test_s3client.py +++ b/tests/test_s3client.py @@ -1,5 +1,5 @@ """ -Copyright (C) 2021 Red Hat, Inc. (https://github.com/Commonjava/charon) +Copyright (C) 2022 Red Hat, Inc. (https://github.com/Commonjava/charon) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/tests/test_util.py b/tests/test_util.py index be44babf..584920c2 100644 --- a/tests/test_util.py +++ b/tests/test_util.py @@ -1,5 +1,5 @@ """ -Copyright (C) 2021 Red Hat, Inc. (https://github.com/Commonjava/charon) +Copyright (C) 2022 Red Hat, Inc. (https://github.com/Commonjava/charon) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
From caa7e95bfb26b28945d8fcc776aee8e91aaaa4d3 Mon Sep 17 00:00:00 2001 From: Gang Li Date: Wed, 16 Feb 2022 14:31:29 +0800 Subject: [PATCH 04/23] Merge pull request #136 from ligangty/main Chore: fix github ci test failure --- .github/workflows/linters.yaml | 2 +- .github/workflows/unittests.yaml | 2 +- test.sh | 13 ++++++++----- 3 files changed, 10 insertions(+), 7 deletions(-) diff --git a/.github/workflows/linters.yaml b/.github/workflows/linters.yaml index 82deeb2c..94a56e46 100644 --- a/.github/workflows/linters.yaml +++ b/.github/workflows/linters.yaml @@ -19,7 +19,7 @@ jobs: matrix: os: - name: centos - version: 8 + version: 7 python: 3 engine: docker diff --git a/.github/workflows/unittests.yaml b/.github/workflows/unittests.yaml index 1e72ad1f..4c45b551 100644 --- a/.github/workflows/unittests.yaml +++ b/.github/workflows/unittests.yaml @@ -18,7 +18,7 @@ jobs: matrix: os: - name: centos - version: 8 + version: 7 python: 3 engine: docker diff --git a/test.sh b/test.sh index aa57037d..a359cfb7 100755 --- a/test.sh +++ b/test.sh @@ -4,7 +4,7 @@ set -eux # Prepare env vars ENGINE=${ENGINE:="podman"} OS=${OS:="centos"} -OS_VERSION=${OS_VERSION:="8"} +OS_VERSION=${OS_VERSION:="7"} PYTHON_VERSION=${PYTHON_VERSION:="3"} ACTION=${ACTION:="test"} IMAGE="$OS:$OS_VERSION" @@ -33,19 +33,22 @@ function setup_charon() { PKG_EXTRA=(dnf-plugins-core git "$PYTHON"-pylint) BUILDDEP=(dnf builddep) if [[ $OS == "centos" ]]; then + PKG="yum" + PKG_EXTRA=(yum-utils git "$PYTHON"-pylint) + BUILDDEP=(yum-builddep) ENABLE_REPO= else ENABLE_REPO="--enablerepo=updates-testing" fi + PIP_INST=("$PIP" install --index-url "${PYPI_INDEX:-https://pypi.org/simple}") if [[ $OS == "centos" ]]; then # Don't let builddep enable *-source repos since they give 404 errors $RUN rm -f /etc/yum.repos.d/CentOS-Sources.repo - # Resolve "Failed to download metadata for repo: Cannot prepare internal mirrorlist: No URLs in mirrorlist" problem - $RUN sed -i 
's|#baseurl=http://mirror.centos.org|baseurl=http://vault.centos.org|g' /etc/yum.repos.d/CentOS-Linux-AppStream.repo - $RUN sed -i 's|#baseurl=http://mirror.centos.org|baseurl=http://vault.centos.org|g' /etc/yum.repos.d/CentOS-Linux-BaseOS.repo + # $RUN rm -f /etc/yum.repos.d/CentOS-Linux-AppStream.repo + # $RUN rm -f /etc/yum.repos.d/CentOS-Linux-BaseOS.repo # This has to run *before* we try installing anything from EPEL $RUN $PKG $ENABLE_REPO install -y epel-release fi @@ -69,7 +72,7 @@ function setup_charon() { fi # install with RPM_PY_SYS=true to avoid error caused by installing on system python - $RUN sh -c "RPM_PY_SYS=true ${PIP_INST[*]} rpm-py-installer" + #$RUN sh -c "RPM_PY_SYS=true ${PIP_INST[*]} rpm-py-installer" # Setuptools install charon from source $RUN $PYTHON setup.py install From 05616a2773e5bb19af97ede7ea589762339ff0c7 Mon Sep 17 00:00:00 2001 From: Gang Li Date: Thu, 10 Mar 2022 18:15:12 +0800 Subject: [PATCH 05/23] Fix: wrong ignored metadata files of *archetype-catalog.xml --- charon/pkgs/maven.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/charon/pkgs/maven.py b/charon/pkgs/maven.py index 80760894..1b9ae27e 100644 --- a/charon/pkgs/maven.py +++ b/charon/pkgs/maven.py @@ -924,7 +924,7 @@ def __hash_decorate_metadata(path: str, metadata: str) -> List[str]: def _is_ignored(filename: str, ignore_patterns: List[str]) -> bool: for ignored_name in STANDARD_GENERATED_IGNORES: - if ignored_name in filename: + if filename and filename.startswith(ignored_name.strip()): logger.warning("Ignoring standard generated Maven path: %s", filename) return True From eb68a1479a22056d0b8c9e24e5ef809323e8488c Mon Sep 17 00:00:00 2001 From: shokakucarrier Date: Thu, 29 Jun 2023 13:40:34 +0800 Subject: [PATCH 06/23] Merge Release from main (#176) * Chore: fix a github action issue * bandit failure because of underlying container, so disabled * Fix pip install deprecation warning * Fix: Remove boto3_type_annotations from requirements due to rpm 
deps issue * validate config with schema * MMENG-3153 Signed-off-by: Harsh Modi * update rpm spec to fix rpm build * CLOUDBLD-3153 Signed-off-by: Harsh Modi * remove `ignore_patterns` as a required property * MMENG-3153 Signed-off-by: Harsh Modi * fix schema retrieval Instead of trying to fetch from relative paths, package schemas with charon and retrieve from pkg resources * MMENG-3153 Signed-off-by: Harsh Modi * bump charon version * MMENG-3153 Signed-off-by: Harsh Modi * make tests more robust * fix test paths to avoid failures when CWD is different * fix getenv * MMENG-3141 Signed-off-by: Harsh Modi * switch to tox env for running tests instead of relying on containers * MMENG-3141 Signed-off-by: Harsh Modi * update github actions to use tox * MMENG-3141 Signed-off-by: Harsh Modi * enable mypy type checkers * MMENG-3141 Signed-off-by: Harsh Modi * enable bandit analyzer * MMENG-3141 Signed-off-by: Harsh Modi * disable mypy it's not ideal currently to enable it. Revisit this later. * MMENG-3141 Signed-off-by: Harsh Modi * fix pytest issue current latest pytest==7.2.0 is having dependency conflicts Signed-off-by: Harsh Modi * initial implementation of singature for artifacts (#169) * initial implementation of singature for artifacts * add API doc to generate_sign function * update README file * use configuration file to controll artifacts needs sign * fix some minor problem * change suffix to exclude instead * remove unused logger * fix minor problem * change charon.yml to a proper extension, change name of suffix confi * add rpm-sign as method of signature (#170) * move subprocess command to configuration, make subprocess to run async (#172) * move subprocess command to configuration, make subprocess to run async * simplify template rendering of sign command * Remove unwanted file * changed semaphore passing in asynic function (#173) * use shlex to replace split instead (#174) * add maven sign unit test (#175) --------- Signed-off-by: Harsh Modi Co-authored-by: 
Gang Li Co-authored-by: Harsh Modi --- .github/workflows/linters.yaml | 126 +++++++++------- .github/workflows/unittests.yaml | 57 +++----- README.md | 6 +- charon.spec | 131 ++++++++--------- charon/cmd/command.py | 66 ++++++--- charon/config.py | 88 ++++------- charon/pkgs/maven.py | 138 +++++++++++------- charon/pkgs/npm.py | 117 ++++++++++----- charon/pkgs/signature.py | 131 +++++++++++++++++ charon/schemas/charon.json | 80 ++++++++++ charon/storage.py | 120 +++++++++++++-- charon/utils/yaml.py | 90 ++++++++++++ config/charon.yaml | 32 ++++ config/charon.yml | 14 -- pytest.ini | 3 - requirements.txt | 4 +- setup.py | 3 +- test-coverage.sh | 3 - test.sh | 109 -------------- tests/base.py | 36 +++-- tests/commons.py | 13 ++ tests/constants.py | 19 +++ tests/requirements.txt | 4 +- tests/test_archive.py | 6 +- tests/test_config.py | 48 +++--- tests/test_manifest_del.py | 17 ++- tests/test_manifest_upload.py | 9 +- tests/test_maven_del.py | 22 +-- tests/test_maven_del_multi_tgts.py | 26 ++-- tests/test_maven_index.py | 42 +++--- tests/test_maven_index_multi_tgts.py | 52 +++---- tests/test_maven_meta.py | 3 +- tests/test_maven_sign.py | 96 ++++++++++++ tests/test_maven_upload.py | 18 ++- tests/test_maven_upload_multi_tgts.py | 32 ++-- tests/test_npm_del.py | 17 ++- tests/test_npm_del_multi_tgts.py | 23 +-- tests/test_npm_dist_gen.py | 27 ++-- tests/test_npm_index.py | 22 +-- tests/test_npm_index_multi_tgts.py | 34 +++-- tests/test_npm_meta.py | 15 +- tests/test_npm_upload.py | 13 +- tests/test_npm_upload_multi_tgts.py | 21 +-- tests/test_pkgs_dryrun.py | 34 +++-- tests/test_s3client.py | 30 ++-- tests/test_util.py | 8 +- tests/utils/test_yaml.py | 202 ++++++++++++++++++++++++++ tox.ini | 40 +++++ 48 files changed, 1512 insertions(+), 735 deletions(-) create mode 100644 charon/pkgs/signature.py create mode 100644 charon/schemas/charon.json create mode 100644 charon/utils/yaml.py create mode 100644 config/charon.yaml delete mode 100644 config/charon.yml delete mode 
100644 pytest.ini delete mode 100755 test-coverage.sh delete mode 100755 test.sh create mode 100644 tests/constants.py create mode 100644 tests/test_maven_sign.py create mode 100644 tests/utils/test_yaml.py create mode 100644 tox.ini diff --git a/.github/workflows/linters.yaml b/.github/workflows/linters.yaml index 94a56e46..86293445 100644 --- a/.github/workflows/linters.yaml +++ b/.github/workflows/linters.yaml @@ -9,80 +9,56 @@ on: - release_* jobs: - bandit: - name: Bandit analyzer for Python ${{ matrix.os.python }} on - ${{ matrix.os.name }}-${{ matrix.os.version }} + flake8: + name: Flake8 for Python ${{ matrix.python-version }} runs-on: ubuntu-latest strategy: - fail-fast: false matrix: - os: - - name: centos - version: 7 - python: 3 - engine: docker - - - name: fedora - version: 34 - python: 3 - engine: docker + python-version: [ "3.8" ] steps: - - uses: actions/checkout@v1 - - - run: ./test.sh - env: - OS: ${{ matrix.os.name }} - OS_VERSION: ${{ matrix.os.version }} - PYTHON_VERSION: ${{ matrix.os.python }} - ENGINE: ${{ matrix.os.engine }} - ACTION: bandit - - flake8: - name: Flake8 for Python 3 - runs-on: ubuntu-latest - - steps: - - name: Check out repo - uses: actions/checkout@v2 + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + python -m pip install --upgrade pip setuptools tox - - name: Run flake8 for Python 3 - uses: containerbuildsystem/actions/flake8@master + - name: Run flake8 on python${{ matrix.python-version }} + run: python -m tox -e flake8 - # markdownlint: - # name: Markdownlint - # runs-on: ubuntu-latest + markdownlint: + name: Markdownlint + runs-on: ubuntu-latest - # steps: - # - name: Check out repo - # uses: actions/checkout@v2 + steps: + - name: Check out repo + uses: actions/checkout@v2 - # - name: Run markdownlint - # uses: containerbuildsystem/actions/markdownlint@master + - name: Run markdownlint + uses: 
containerbuildsystem/actions/markdownlint@master pylint: - name: Pylint analyzer for Python ${{ matrix.os.python }} + name: Pylint analyzer for Python ${{ matrix.python-version }} runs-on: ubuntu-latest strategy: matrix: - os: - - name: fedora - version: 34 - python: 3 - engine: docker + python-version: [ "3.8" ] steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + python -m pip install --upgrade pip setuptools tox - - run: ./test.sh - env: - OS: ${{ matrix.os.name }} - OS_VERSION: ${{ matrix.os.version }} - PYTHON_VERSION: ${{ matrix.os.python }} - ENGINE: ${{ matrix.os.engine }} - ACTION: pylint + - name: Run pylint on python${{ matrix.python-version }} + run: python -m tox -e pylint shellcheck: name: Shellcheck @@ -94,3 +70,43 @@ jobs: - name: Run ShellCheck uses: containerbuildsystem/actions/shellcheck@master + +# mypy: +# name: mypy type checker for Python ${{ matrix.python-version }} +# runs-on: ubuntu-latest +# +# strategy: +# matrix: +# python-version: [ "3.8" ] +# +# steps: +# - uses: actions/checkout@v3 +# - uses: actions/setup-python@v4 +# with: +# python-version: ${{ matrix.python-version }} +# - name: Install dependencies +# run: | +# python -m pip install --upgrade pip setuptools tox +# +# - name: Run mypy on python${{ matrix.python-version }} +# run: python -m tox -e mypy + + bandit: + name: Bandit analyzer for Python ${{ matrix.python-version }} + runs-on: ubuntu-latest + + strategy: + matrix: + python-version: [ "3.8" ] + + steps: + - uses: actions/checkout@v1 + - uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + python -m pip install --upgrade pip setuptools tox + + - name: Run bandit analyzer on python${{ matrix.python-version }} + run: python -m tox -e bandit diff --git a/.github/workflows/unittests.yaml 
b/.github/workflows/unittests.yaml index 4c45b551..3140088e 100644 --- a/.github/workflows/unittests.yaml +++ b/.github/workflows/unittests.yaml @@ -10,68 +10,51 @@ on: jobs: test: - name: Python ${{ matrix.os.python }} tests on ${{ matrix.os.name }}-${{ matrix.os.version }} - runs-on: ubuntu-18.04 + name: Python ${{ matrix.python-version }} tests + runs-on: ubuntu-latest strategy: fail-fast: false matrix: - os: - - name: centos - version: 7 - python: 3 - engine: docker - - - name: fedora - version: 33 - python: 3 - engine: docker - - - name: fedora - version: 34 - python: 3 - engine: docker + python-version: ["3.7", "3.8", "3.9"] steps: - - name: Check out repo - uses: actions/checkout@v2 + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + python -m pip install --upgrade pip setuptools tox - - name: pytests via test.sh - env: - OS: ${{ matrix.os.name }} - OS_VERSION: ${{ matrix.os.version }} - PYTHON_VERSION: ${{ matrix.os.python }} - ENGINE: ${{ matrix.os.engine }} - run: ./test.sh + - name: Run unittests on python${{ matrix.python-version }} + run: python -m tox -e test - name: Upload pytest html report uses: actions/upload-artifact@v2 if: failure() with: - path: __pytest_reports/atomic-reactor-unit-tests.html - name: atomic-reactor-unit-tests_${{ matrix.os.name }}_${{ matrix.os.version }}.python${{ matrix.os.python }}.html + path: __pytest_reports/charon-unit-tests.html + name: charon-unit-tests_python${{ matrix.os.python }}.html + - name: Run coveralls-python env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - COVERALLS_FLAG_NAME: ${{ matrix.os.name }}-${{ matrix.os.version }}-python${{ matrix.os.python }} + COVERALLS_FLAG_NAME: python${{ matrix.python-version }} COVERALLS_PARALLEL: true run: | - pip3 install --upgrade pip - pip3 install --upgrade setuptools - pip3 install --upgrade coveralls - /home/runner/.local/bin/coveralls --service=github + python -m 
pip install --upgrade coveralls + python -m coveralls --service=github coveralls-finish: name: Finish coveralls-python needs: test - runs-on: ubuntu-18.04 + runs-on: ubuntu-latest steps: - name: Finished run: | - pip3 install --upgrade pip - pip3 install --upgrade setuptools - pip3 install --upgrade coveralls + pip3 install --upgrade pip setuptools coveralls /home/runner/.local/bin/coveralls --finish --service=github env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/README.md b/README.md index 406de866..cdd1608d 100644 --- a/README.md +++ b/README.md @@ -14,6 +14,10 @@ future. And Ronda service will be hosted in AWS S3. See [AWS CLi V2 installation](https://docs.aws.amazon.com/cli/latest/userguide/install-cliv2-linux.html#cliv2-linux-install) +### [Optional] rpm-sign or GnuPG CLI tool + +Can be configured to use rpm-sign or any command to generate .asc file. + ## Installation ### From git @@ -49,7 +53,7 @@ to configure AWS access credentials. ### charon-upload: upload a repo to S3 ```bash -usage: charon upload $tarball --product/-p ${prod} --version/-v ${ver} [--root_path] [--ignore_patterns] [--debug] +usage: charon upload $tarball --product/-p ${prod} --version/-v ${ver} [--root_path] [--ignore_patterns] [--debug] [--contain_signature] [--key] ``` This command will upload the repo in tarball to S3. 
diff --git a/charon.spec b/charon.spec index a1888098..abe7d7e5 100644 --- a/charon.spec +++ b/charon.spec @@ -1,89 +1,90 @@ -%global binaries_py_version %{python3_version} %global owner Commonjava -%global project charon +%global modulename charon + +%global charon_version 1.1.2 +%global sdist_tar_name %{modulename}-%{charon_version} + +%global python3_pkgversion 3 + +Name: %{modulename} +Summary: Charon CLI +Version: %{charon_version} +Release: 1%{?dist} +URL: https://github.com/%{owner}/%{modulename} +Source0: %{url}/archive/%{charon_version}.tar.gz +Provides: %{modulename} = %{version}-%{release} + +Group: Development/Tools +License: APLv2 + +# Build Requirements +BuildArch: x86_64 + +BuildRequires: python%{python3_pkgversion}-setuptools +BuildRequires: python%{python3_pkgversion}-devel + +Requires: python%{python3_pkgversion}-boto3 +Requires: python%{python3_pkgversion}-botocore +Requires: python%{python3_pkgversion}-jinja2 +Requires: python%{python3_pkgversion}-markupsafe +Requires: python%{python3_pkgversion}-dateutil +Requires: python%{python3_pkgversion}-six +Requires: python%{python3_pkgversion}-jmespath +Requires: python%{python3_pkgversion}-urllib3 +Requires: python%{python3_pkgversion}-s3transfer +Requires: python%{python3_pkgversion}-click +Requires: python%{python3_pkgversion}-requests +Requires: python%{python3_pkgversion}-idna +Requires: python%{python3_pkgversion}-chardet +Requires: python%{python3_pkgversion}-cryptography +Requires: python%{python3_pkgversion}-cffi +Requires: python%{python3_pkgversion}-pycparser +Requires: python%{python3_pkgversion}-certifi +Requires: python%{python3_pkgversion}-pyOpenSSL +Requires: python%{python3_pkgversion}-ruamel-yaml +Requires: python%{python3_pkgversion}-defusedxml +Requires: python%{python3_pkgversion}-semantic-version +Requires: python%{python3_pkgversion}-subresource-integrity +Requires: python%{python3_pkgversion}-jsonschema +Requires: python%{python3_pkgversion}-importlib-metadata +Requires: 
python%{python3_pkgversion}-zipp +Requires: python%{python3_pkgversion}-attrs +Requires: python%{python3_pkgversion}-pyrsistent -Name: %{project} -Version: 1.0.0 -Release: 1%{?dist} - -Summary: Charon CLI -Group: Development/Tools -License: APLv2 -URL: https://github.com/%{owner}/%{project} -Source0: https://github.com/%{owner}/%{project}/archive/%{version}.tar.gz - -BuildArch: noarch - -Requires: python3-charon = %{version}-%{release} -Requires: git >= 1.7.10 - -BuildRequires: python3-devel -BuildRequires: python3-setuptools %description Simple Python tool with command line interface for charon init, upload, delete, gen and ls functions. -%package -n python3-charon -Summary: Python 3 CHARON library -Group: Development/Tools -License: APLv2 -Requires: python3-requests -Requires: python3-setuptools -Requires: python3-rpm -%{?python_provide:%python_provide python3-charon} - -%description -n python3-charon -Simple Python 3 library for CHARON functions. - %prep -%setup -q +%autosetup -p1 -n %{sdist_tar_name} %build +# Disable debuginfo packages +%define _enable_debug_package 0 +%define debug_package %{nil} %py3_build %install +export LANG=en_US.UTF-8 LANGUAGE=en_US.en LC_ALL=en_US.UTF-8 %py3_install -mv %{buildroot}%{_bindir}/charon %{buildroot}%{_bindir}/charon-%{python3_version} -ln -s %{_bindir}/charon-%{python3_version} %{buildroot}%{_bindir}/charon-3 - -ln -s %{_bindir}/charon-%{binaries_py_version} %{buildroot}%{_bindir}/charon - -# ship charon in form of tarball so it can be installed within build image -mkdir -p %{buildroot}/%{_datadir}/%{name}/ -cp -a %{sources} %{buildroot}/%{_datadir}/%{name}/charon.tar.gz - -# setup docs -#mkdir -p %{buildroot}%{_mandir}/man1 -#cp -a docs/manpage/charon.1 %{buildroot}%{_mandir}/man1/ %files +%defattr(-,root,root) %doc README.md -#%{_mandir}/man1/charon.1* +%{_bindir}/%{modulename}* +%{python3_sitelib}/* %{!?_licensedir:%global license %doc} %license LICENSE -%{_bindir}/charon - -%files -n python3-charon -%doc README.md 
-%{!?_licensedir:%global license %doc} -%license LICENSE -%{_bindir}/charon-%{python3_version} -%{_bindir}/charon-3 -#%{_mandir}/man1/charon.1* -%dir %{python3_sitelib}/charon -%dir %{python3_sitelib}/charon/__pycache__ -%{python3_sitelib}/charon/*.* -%{python3_sitelib}/charon/cmd -%{python3_sitelib}/charon/pkgs -%{python3_sitelib}/charon/utils -%{python3_sitelib}/charon/__pycache__/*.py* -%{python3_sitelib}/charon_*.egg-info -%dir %{_datadir}/%{name} -# ship charon in form of tarball so it can be installed within build image -%{_datadir}/%{name}/charon.tar.gz %changelog +* Tue Sep 20 2022 Harsh Modi +- 1.1.2 release +- add configuration schema and validation +- allow specifying multiple target buckets + +* Thu Aug 25 2022 Harsh Modi +- 1.1.1 release diff --git a/charon/cmd/command.py b/charon/cmd/command.py index cbb4e110..2ef88aed 100644 --- a/charon/cmd/command.py +++ b/charon/cmd/command.py @@ -16,6 +16,7 @@ from typing import List, Tuple from charon.config import CharonConfig, get_config +from charon.constants import DEFAULT_REGISTRY from charon.utils.logs import set_logging from charon.utils.archive import detect_npm_archive, download_archive, NpmArchiveType from charon.pkgs.maven import handle_maven_uploading, handle_maven_del @@ -60,6 +61,7 @@ @option( "--target", "-t", + 'targets', help=""" The target to do the uploading, which will decide which s3 bucket and what root path where all files will be uploaded to. @@ -94,6 +96,22 @@ be extracted, when needed. """, ) +@option( + "--contain_signature", + "-s", + is_flag=True, + help=""" + Toggle signature generation and upload feature in charon. + """ +) +@option( + "--sign_key", + "-k", + help=""" + rpm-sign key to be used, will replace {{ key }} in default configuration for signature. + Does noting if detach_signature_command does not contain {{ key }} field. 
+ """, +) @option( "--debug", "-D", @@ -114,10 +132,12 @@ def upload( repo: str, product: str, version: str, - target: List[str], + targets: List[str], root_path="maven-repository", ignore_patterns: List[str] = None, work_dir: str = None, + contain_signature: bool = False, + sign_key: str = "redhatdevel", debug=False, quiet=False, dryrun=False @@ -147,15 +167,17 @@ def upload( npm_archive_type = detect_npm_archive(archive_path) product_key = f"{product}-{version}" manifest_bucket_name = conf.get_manifest_bucket() - targets_ = __get_targets(target, conf) + buckets = __get_buckets(targets, conf) if npm_archive_type != NpmArchiveType.NOT_NPM: logger.info("This is a npm archive") tmp_dir, succeeded = handle_npm_uploading( archive_path, product_key, - targets=targets_, + buckets=buckets, aws_profile=aws_profile, dir_=work_dir, + gen_sign=contain_signature, + key=sign_key, dry_run=dryrun, manifest_bucket_name=manifest_bucket_name ) @@ -173,9 +195,11 @@ def upload( product_key, ignore_patterns_list, root=root_path, - targets=targets_, + buckets=buckets, aws_profile=aws_profile, dir_=work_dir, + gen_sign=contain_signature, + key=sign_key, dry_run=dryrun, manifest_bucket_name=manifest_bucket_name ) @@ -217,6 +241,7 @@ def upload( @option( "--target", "-t", + 'targets', help=""" The target to do the deletion, which will decide which s3 bucket and what root path where all files will be deleted from. 
@@ -270,7 +295,7 @@ def delete( repo: str, product: str, version: str, - target: List[str], + targets: List[str], root_path="maven-repository", ignore_patterns: List[str] = None, work_dir: str = None, @@ -303,13 +328,13 @@ def delete( npm_archive_type = detect_npm_archive(archive_path) product_key = f"{product}-{version}" manifest_bucket_name = conf.get_manifest_bucket() - targets_ = __get_targets(target, conf) + buckets = __get_buckets(targets, conf) if npm_archive_type != NpmArchiveType.NOT_NPM: logger.info("This is a npm archive") tmp_dir, succeeded = handle_npm_del( archive_path, product_key, - targets=targets_, + buckets=buckets, aws_profile=aws_profile, dir_=work_dir, dry_run=dryrun, @@ -329,7 +354,7 @@ def delete( product_key, ignore_patterns_list, root=root_path, - targets=targets_, + buckets=buckets, aws_profile=aws_profile, dir_=work_dir, dry_run=dryrun, @@ -345,22 +370,15 @@ def delete( __safe_delete(tmp_dir) -def __get_targets(target: List[str], conf: CharonConfig) -> List[Tuple[str, str, str, str]]: - targets_ = [] - for tgt in target: - aws_bucket = conf.get_aws_bucket(tgt) - if not aws_bucket: - continue - prefix = conf.get_bucket_prefix(tgt) - registry = conf.get_bucket_registry(tgt) - targets_.append([tgt, aws_bucket, prefix, registry]) - if len(targets_) == 0: - logger.error( - "All targets are not valid or configured, " - "please check your charon configurations." 
- ) - sys.exit(1) - return targets_ +def __get_buckets(targets: List[str], conf: CharonConfig) -> List[Tuple[str, str, str, str]]: + buckets = [] + for target in targets: + for bucket in conf.get_target(target): + aws_bucket = bucket.get('bucket') + prefix = bucket.get('prefix', '') + registry = bucket.get('registry', DEFAULT_REGISTRY) + buckets.append((target, aws_bucket, prefix, registry)) + return buckets def __safe_delete(tmp_dir: str): diff --git a/charon/config.py b/charon/config.py index bb7308fa..8f128617 100644 --- a/charon/config.py +++ b/charon/config.py @@ -13,14 +13,11 @@ See the License for the specific language governing permissions and limitations under the License. """ -from typing import Dict, List -from ruamel.yaml import YAML -from pathlib import Path -import os import logging +import os +from typing import Dict, List, Optional -from charon.utils.strings import remove_prefix -from charon.constants import DEFAULT_REGISTRY +from charon.utils.yaml import read_yaml_from_file_path CONFIG_FILE = "charon.yaml" @@ -33,82 +30,49 @@ class CharonConfig(object): The configuration file will be named as charon.yaml, and will be stored in $HOME/.charon/ folder by default. 
""" + def __init__(self, data: Dict): self.__ignore_patterns: List[str] = data.get("ignore_patterns", None) self.__aws_profile: str = data.get("aws_profile", None) self.__targets: Dict = data.get("targets", None) - if not self.__targets or not isinstance(self.__targets, Dict): - raise TypeError("Charon configuration is not correct: targets is invalid.") self.__manifest_bucket: str = data.get("manifest_bucket", None) + self.__ignore_signature_suffix: Dict = data.get("ignore_signature_suffix", None) + self.__signature_command: str = data.get("detach_signature_command", None) def get_ignore_patterns(self) -> List[str]: return self.__ignore_patterns + def get_target(self, target: str) -> List[Dict]: + target_: List = self.__targets.get(target, []) + if not target_: + logger.error("The target %s is not found in charon configuration.", target) + return target_ + def get_aws_profile(self) -> str: return self.__aws_profile - def get_aws_bucket(self, target: str) -> str: - target_: Dict = self.__targets.get(target, None) - if not target_ or not isinstance(target_, Dict): - logger.error("The target %s is not found in charon configuration.", target) - return None - bucket = target_.get("bucket", None) - if not bucket: - logger.error("The bucket is not found for target %s " - "in charon configuration.", target) - return bucket - - def get_bucket_prefix(self, target: str) -> str: - target_: Dict = self.__targets.get(target, None) - if not target_ or not isinstance(target_, Dict): - logger.error("The target %s is not found in charon " - "configuration.", target) - return None - prefix = target_.get("prefix", None) - if not prefix: - logger.warning("The prefix is not found for target %s " - "in charon configuration, so no prefix will " - "be used", target) - prefix = "" - # removing first slash as it is not needed. 
- prefix = remove_prefix(prefix, "/") - return prefix - - def get_bucket_registry(self, target: str) -> str: - target_: Dict = self.__targets.get(target, None) - if not target_ or not isinstance(target_, Dict): - logger.error("The target %s is not found in charon configuration.", target) - return None - registry = target_.get("registry", None) - if not registry: - registry = DEFAULT_REGISTRY - logger.error("The registry is not found for target %s " - "in charon configuration, so DEFAULT_REGISTRY(localhost) will be used.", - target) - return registry - def get_manifest_bucket(self) -> str: return self.__manifest_bucket + def get_ignore_signature_suffix(self, package_type: str) -> List[str]: + xartifact_list: List = self.__ignore_signature_suffix.get(package_type) + if not xartifact_list: + logger.error("package type %s does not have ignore artifact config.", package_type) + return xartifact_list + + def get_detach_signature_command(self) -> str: + return self.__signature_command + -def get_config() -> CharonConfig: - config_file = os.path.join(os.getenv("HOME"), ".charon", CONFIG_FILE) - try: - yaml = YAML(typ='safe') - data = yaml.load(stream=Path(config_file)) - except Exception as e: - logger.error("Can not load charon config file due to error: %s", e) - return None - try: - return CharonConfig(data) - except TypeError as e: - logger.error(e) - return None +def get_config() -> Optional[CharonConfig]: + config_file_path = os.path.join(os.getenv("HOME"), ".charon", CONFIG_FILE) + data = read_yaml_from_file_path(config_file_path, 'schemas/charon.json') + return CharonConfig(data) def get_template(template_file: str) -> str: template = os.path.join( - os.getenv("HOME"), ".charon/template", template_file + os.getenv("HOME", ''), ".charon/template", template_file ) if os.path.isfile(template): with open(template, encoding="utf-8") as file_: diff --git a/charon/pkgs/maven.py b/charon/pkgs/maven.py index cabc87ef..9fd57422 100644 --- a/charon/pkgs/maven.py +++ 
b/charon/pkgs/maven.py @@ -15,12 +15,13 @@ """ from charon.utils.files import HashType import charon.pkgs.indexing as indexing +import charon.pkgs.signature as signature from charon.utils.files import overwrite_file, digest, write_manifest from charon.utils.archive import extract_zip_all from charon.utils.strings import remove_prefix from charon.storage import S3Client from charon.pkgs.pkg_utils import upload_post_process, rollback_post_process -from charon.config import get_template +from charon.config import CharonConfig, get_template, get_config from charon.constants import (META_FILE_GEN_KEY, META_FILE_DEL_KEY, META_FILE_FAILED, MAVEN_METADATA_TEMPLATE, ARCHETYPE_CATALOG_TEMPLATE, ARCHETYPE_CATALOG_FILENAME, @@ -256,10 +257,12 @@ def handle_maven_uploading( prod_key: str, ignore_patterns=None, root="maven-repository", - targets: List[Tuple[str, str, str, str]] = None, + buckets: List[Tuple[str, str, str, str]] = None, aws_profile=None, dir_=None, do_index=True, + gen_sign=False, + key=None, dry_run=False, manifest_bucket_name=None ) -> Tuple[str, bool]: @@ -304,10 +307,10 @@ def handle_maven_uploading( # 4. Do uploading s3_client = S3Client(aws_profile=aws_profile, dry_run=dry_run) - targets_ = [(target[1], remove_prefix(target[2], "/")) for target in targets] + targets_ = [(bucket[1], remove_prefix(bucket[2], "/")) for bucket in buckets] logger.info( "Start uploading files to s3 buckets: %s", - [target[1] for target in targets] + [bucket[1] for bucket in buckets] ) failed_files = s3_client.upload_files( file_paths=valid_mvn_paths, @@ -317,7 +320,8 @@ def handle_maven_uploading( ) logger.info("Files uploading done\n") succeeded = True - for target in targets: + generated_signs = [] + for bucket in buckets: # 5. 
Do manifest uploading if not manifest_bucket_name: logger.warning( @@ -325,7 +329,7 @@ def handle_maven_uploading( 'uploading\n') else: logger.info("Start uploading manifest to s3 bucket %s", manifest_bucket_name) - manifest_folder = target[1] + manifest_folder = bucket[1] manifest_name, manifest_full_path = write_manifest(valid_mvn_paths, top_level, prod_key) s3_client.upload_manifest( manifest_name, manifest_full_path, @@ -334,39 +338,38 @@ def handle_maven_uploading( logger.info("Manifest uploading is done\n") # 6. Use uploaded poms to scan s3 for metadata refreshment - bucket_ = target[1] - prefix__ = remove_prefix(target[2], "/") - failed_metas = [] - logger.info("Start generating maven-metadata.xml files for bucket %s", bucket_) + bucket_name = bucket[1] + prefix = remove_prefix(bucket[2], "/") + logger.info("Start generating maven-metadata.xml files for bucket %s", bucket_name) meta_files = _generate_metadatas( - s3=s3_client, bucket=bucket_, + s3=s3_client, bucket=bucket_name, poms=valid_poms, root=top_level, - prefix=prefix__ + prefix=prefix ) logger.info("maven-metadata.xml files generation done\n") failed_metas = meta_files.get(META_FILE_FAILED, []) # 7. Upload all maven-metadata.xml if META_FILE_GEN_KEY in meta_files: - logger.info("Start updating maven-metadata.xml to s3 bucket %s", bucket_) + logger.info("Start updating maven-metadata.xml to s3 bucket %s", bucket_name) _failed_metas = s3_client.upload_metadatas( meta_file_paths=meta_files[META_FILE_GEN_KEY], - target=(bucket_, prefix__), + target=(bucket_name, prefix), product=None, root=top_level ) failed_metas.extend(_failed_metas) - logger.info("maven-metadata.xml updating done in bucket %s\n", bucket_) + logger.info("maven-metadata.xml updating done in bucket %s\n", bucket_name) # 8. 
Determine refreshment of archetype-catalog.xml if os.path.exists(os.path.join(top_level, "archetype-catalog.xml")): - logger.info("Start generating archetype-catalog.xml for bucket %s", bucket_) + logger.info("Start generating archetype-catalog.xml for bucket %s", bucket_name) upload_archetype_file = _generate_upload_archetype_catalog( - s3=s3_client, bucket=bucket_, + s3=s3_client, bucket=bucket_name, root=top_level, - prefix=prefix__ + prefix=prefix ) - logger.info("archetype-catalog.xml files generation done in bucket %s\n", bucket_) + logger.info("archetype-catalog.xml files generation done in bucket %s\n", bucket_name) # 9. Upload archetype-catalog.xml if it has changed if upload_archetype_file: @@ -374,31 +377,60 @@ def handle_maven_uploading( archetype_files.extend( __hash_decorate_metadata(top_level, ARCHETYPE_CATALOG_FILENAME) ) - logger.info("Start updating archetype-catalog.xml to s3 bucket %s", bucket_) + logger.info("Start updating archetype-catalog.xml to s3 bucket %s", bucket_name) _failed_metas = s3_client.upload_metadatas( meta_file_paths=archetype_files, - target=(bucket_, prefix__), + target=(bucket_name, prefix), product=None, root=top_level ) failed_metas.extend(_failed_metas) - logger.info("archetype-catalog.xml updating done in bucket %s\n", bucket_) + logger.info("archetype-catalog.xml updating done in bucket %s\n", bucket_name) + + # 10. 
Generate signature file if contain_signature is set to True + if gen_sign: + conf = get_config() + if not conf: + sys.exit(1) + suffix_list = __get_suffix(PACKAGE_TYPE_MAVEN, conf) + command = conf.get_detach_signature_command() + artifacts = [s for s in valid_mvn_paths if not s.endswith(tuple(suffix_list))] + logger.info("Start generating signature for s3 bucket %s\n", bucket_name) + (_failed_metas, _generated_signs) = signature.generate_sign( + PACKAGE_TYPE_MAVEN, artifacts, + top_level, prefix, + s3_client, bucket_name, + key, command + ) + failed_metas.extend(_failed_metas) + generated_signs.extend(_generated_signs) + logger.info("Singature generation done.\n") + + logger.info("Start upload singature files to s3 bucket %s\n", bucket_name) + _failed_metas = s3_client.upload_signatures( + meta_file_paths=generated_signs, + target=(bucket_name, prefix), + product=None, + root=top_level + ) + failed_metas.extend(_failed_metas) + logger.info("Signature uploading done.\n") # this step generates index.html for each dir and add them to file list # index is similar to metadata, it will be overwritten everytime if do_index: - logger.info("Start generating index files to s3 bucket %s", bucket_) + logger.info("Start generating index files to s3 bucket %s", bucket_name) created_indexes = indexing.generate_indexes( PACKAGE_TYPE_MAVEN, top_level, valid_dirs, - s3_client, bucket_, prefix__ + s3_client, bucket_name, prefix ) logger.info("Index files generation done.\n") - logger.info("Start updating index files to s3 bucket %s", bucket_) + logger.info("Start updating index files to s3 bucket %s", bucket_name) _failed_metas = s3_client.upload_metadatas( meta_file_paths=created_indexes, - target=(bucket_, prefix__), + target=(bucket_name, prefix), product=None, root=top_level ) @@ -407,7 +439,7 @@ def handle_maven_uploading( else: logger.info("Bypass indexing") - upload_post_process(failed_files, failed_metas, prod_key, bucket_) + upload_post_process(failed_files, failed_metas, 
prod_key, bucket_name) succeeded = succeeded and len(failed_files) <= 0 and len(failed_metas) <= 0 return (tmp_root, succeeded) @@ -418,7 +450,7 @@ def handle_maven_del( prod_key: str, ignore_patterns=None, root="maven-repository", - targets: List[Tuple[str, str, str, str]] = None, + buckets: List[Tuple[str, str, str, str]] = None, aws_profile=None, dir_=None, do_index=True, @@ -433,7 +465,7 @@ def handle_maven_del( need to upload in the tarball * root is a prefix in the tarball to identify which path is the beginning of the maven GAV path - * targets contains the target name with its bucket name and prefix + * buckets contains the target name with its bucket name and prefix for the bucket, which will be used to store artifacts with the prefix. See target definition in Charon configuration for details * dir is base dir for extracting the tarball, will use system @@ -454,21 +486,21 @@ def handle_maven_del( # 3. Delete all valid_paths from s3 logger.debug("Valid poms: %s", valid_poms) succeeded = True - for target in targets: - prefix_ = remove_prefix(target[2], "/") + for bucket in buckets: + prefix = remove_prefix(bucket[2], "/") s3_client = S3Client(aws_profile=aws_profile, dry_run=dry_run) - bucket = target[1] - logger.info("Start deleting files from s3 bucket %s", bucket) + bucket_name = bucket[1] + logger.info("Start deleting files from s3 bucket %s", bucket_name) failed_files = s3_client.delete_files( valid_mvn_paths, - target=(bucket, prefix_), + target=(bucket_name, prefix), product=prod_key, root=top_level ) logger.info("Files deletion done\n") # 4. Delete related manifest from s3 - manifest_folder = target[1] + manifest_folder = bucket[1] logger.info( "Start deleting manifest from s3 bucket %s in folder %s", manifest_bucket_name, manifest_folder @@ -479,25 +511,25 @@ def handle_maven_del( # 5. 
Use changed GA to scan s3 for metadata refreshment logger.info( "Start generating maven-metadata.xml files for all changed GAs in s3 bucket %s", - bucket + bucket_name ) meta_files = _generate_metadatas( - s3=s3_client, bucket=bucket, + s3=s3_client, bucket=bucket_name, poms=valid_poms, root=top_level, - prefix=prefix_ + prefix=prefix ) logger.info("maven-metadata.xml files generation done\n") # 6. Upload all maven-metadata.xml. We need to delete metadata files # firstly for all affected GA, and then replace the theirs content. - logger.info("Start updating maven-metadata.xml to s3 bucket %s", bucket) + logger.info("Start updating maven-metadata.xml to s3 bucket %s", bucket_name) all_meta_files = [] for _, files in meta_files.items(): all_meta_files.extend(files) s3_client.delete_files( file_paths=all_meta_files, - target=(bucket, prefix_), + target=(bucket_name, prefix), product=None, root=top_level ) @@ -505,7 +537,7 @@ def handle_maven_del( if META_FILE_GEN_KEY in meta_files: _failed_metas = s3_client.upload_metadatas( meta_file_paths=meta_files[META_FILE_GEN_KEY], - target=(bucket, prefix_), + target=(bucket_name, prefix), product=None, root=top_level ) @@ -517,9 +549,9 @@ def handle_maven_del( if os.path.exists(os.path.join(top_level, "archetype-catalog.xml")): logger.info("Start generating archetype-catalog.xml") archetype_action = _generate_rollback_archetype_catalog( - s3=s3_client, bucket=bucket, + s3=s3_client, bucket=bucket_name, root=top_level, - prefix=prefix_ + prefix=prefix ) logger.info("archetype-catalog.xml files generation done\n") @@ -527,10 +559,10 @@ def handle_maven_del( archetype_files = [os.path.join(top_level, ARCHETYPE_CATALOG_FILENAME)] archetype_files.extend(__hash_decorate_metadata(top_level, ARCHETYPE_CATALOG_FILENAME)) if archetype_action < 0: - logger.info("Start updating archetype-catalog.xml to s3 bucket %s", bucket) + logger.info("Start updating archetype-catalog.xml to s3 bucket %s", bucket_name) _failed_metas = 
s3_client.delete_files( file_paths=archetype_files, - target=(bucket, prefix_), + target=(bucket_name, prefix), product=None, root=top_level ) @@ -539,7 +571,7 @@ def handle_maven_del( elif archetype_action > 0: _failed_metas = s3_client.upload_metadatas( meta_file_paths=archetype_files, - target=(bucket, prefix_), + target=(bucket_name, prefix), product=None, root=top_level ) @@ -550,14 +582,14 @@ def handle_maven_del( if do_index: logger.info("Start generating index files for all changed entries") created_indexes = indexing.generate_indexes( - PACKAGE_TYPE_MAVEN, top_level, valid_dirs, s3_client, bucket, prefix_ + PACKAGE_TYPE_MAVEN, top_level, valid_dirs, s3_client, bucket_name, prefix ) logger.info("Index files generation done.\n") - logger.info("Start updating index to s3 bucket %s", bucket) + logger.info("Start updating index to s3 bucket %s", bucket_name) _failed_index_files = s3_client.upload_metadatas( meta_file_paths=created_indexes, - target=(bucket, prefix_), + target=(bucket_name, prefix), product=None, root=top_level ) @@ -567,7 +599,7 @@ def handle_maven_del( else: logger.info("Bypassing indexing") - rollback_post_process(failed_files, failed_metas, prod_key, bucket) + rollback_post_process(failed_files, failed_metas, prod_key, bucket_name) succeeded = succeeded and len(failed_files) == 0 and len(failed_metas) == 0 return (tmp_root, succeeded) @@ -1012,6 +1044,12 @@ def _handle_error(err_msgs: List[str]): pass +def __get_suffix(package_type: str, conf: CharonConfig) -> List[str]: + if package_type: + return conf.get_ignore_signature_suffix(package_type) + return [] + + class VersionCompareKey: 'Used as key function for version sorting' def __init__(self, obj): diff --git a/charon/pkgs/npm.py b/charon/pkgs/npm.py index 70c45a77..7c160673 100644 --- a/charon/pkgs/npm.py +++ b/charon/pkgs/npm.py @@ -24,6 +24,8 @@ from semantic_version import compare import charon.pkgs.indexing as indexing +import charon.pkgs.signature as signature +from charon.config 
import CharonConfig, get_config from charon.constants import META_FILE_GEN_KEY, META_FILE_DEL_KEY, PACKAGE_TYPE_NPM from charon.storage import S3Client from charon.utils.archive import extract_npm_tarball @@ -66,10 +68,12 @@ def __init__(self, metadata, is_version): def handle_npm_uploading( tarball_path: str, product: str, - targets: List[Tuple[str, str, str, str]] = None, + buckets: List[Tuple[str, str, str, str]] = None, aws_profile=None, dir_=None, do_index=True, + gen_sign=False, + key=None, dry_run=False, manifest_bucket_name=None ) -> Tuple[str, bool]: @@ -79,7 +83,7 @@ def handle_npm_uploading( * tarball_path is the location of the tarball in filesystem * product is used to identify which product this repo tar belongs to - * targets contains the target name with its bucket name and prefix + * buckets contains the target name with its bucket name and prefix for the bucket, which will be used to store artifacts with the prefix. See target definition in Charon configuration for details * dir_ is base dir for extracting the tarball, will use system @@ -88,22 +92,23 @@ def handle_npm_uploading( Returns the directory used for archive processing and if uploading is successful """ client = S3Client(aws_profile=aws_profile, dry_run=dry_run) - for target in targets: - bucket_ = target[1] - prefix__ = remove_prefix(target[2], "/") - registry__ = target[3] + generated_signs = [] + for bucket in buckets: + bucket_name = bucket[1] + prefix = remove_prefix(bucket[2], "/") + registry = bucket[3] target_dir, valid_paths, package_metadata = _scan_metadata_paths_from_archive( - tarball_path, registry__, prod=product, dir__=dir_ + tarball_path, registry, prod=product, dir__=dir_ ) if not os.path.isdir(target_dir): logger.error("Error: the extracted target_dir path %s does not exist.", target_dir) sys.exit(1) valid_dirs = __get_path_tree(valid_paths, target_dir) - logger.info("Start uploading files to s3 buckets: %s", bucket_) + logger.info("Start uploading files to s3 buckets: 
%s", bucket_name) failed_files = client.upload_files( file_paths=[valid_paths[0]], - targets=[(bucket_, prefix__)], + targets=[(bucket_name, prefix)], product=product, root=target_dir ) @@ -117,7 +122,7 @@ def handle_npm_uploading( 'uploading\n') else: logger.info("Start uploading manifest to s3 bucket %s", manifest_bucket_name) - manifest_folder = bucket_ + manifest_folder = bucket_name manifest_name, manifest_full_path = write_manifest(valid_paths, target_dir, product) client.upload_manifest( @@ -128,13 +133,13 @@ def handle_npm_uploading( logger.info( "Start generating version-level package.json for package: %s in s3 bucket %s", - package_metadata.name, bucket_ + package_metadata.name, bucket_name ) failed_metas = [] _version_metadata_path = valid_paths[1] _failed_metas = client.upload_metadatas( meta_file_paths=[_version_metadata_path], - target=(bucket_, prefix__), + target=(bucket_name, prefix), product=product, root=target_dir ) @@ -143,36 +148,66 @@ def handle_npm_uploading( logger.info( "Start generating package.json for package: %s in s3 bucket %s", - package_metadata.name, bucket_ + package_metadata.name, bucket_name ) meta_files = _gen_npm_package_metadata_for_upload( - client, bucket_, target_dir, package_metadata, prefix__ + client, bucket_name, target_dir, package_metadata, prefix ) logger.info("package.json generation done\n") if META_FILE_GEN_KEY in meta_files: _failed_metas = client.upload_metadatas( meta_file_paths=[meta_files[META_FILE_GEN_KEY]], - target=(bucket_, prefix__), + target=(bucket_name, prefix), product=None, root=target_dir ) failed_metas.extend(_failed_metas) logger.info("package.json uploading done") + if gen_sign: + conf = get_config() + if not conf: + sys.exit(1) + suffix_list = __get_suffix(PACKAGE_TYPE_NPM, conf) + command = conf.get_detach_signature_command() + artifacts = [s for s in valid_paths if not s.endswith(tuple(suffix_list))] + if META_FILE_GEN_KEY in meta_files: + artifacts.extend(meta_files[META_FILE_GEN_KEY]) + 
logger.info("Start generating signature for s3 bucket %s\n", bucket_name) + (_failed_metas, _generated_signs) = signature.generate_sign( + PACKAGE_TYPE_NPM, artifacts, + target_dir, prefix, + client, bucket_name, + key, command + ) + failed_metas.extend(_failed_metas) + generated_signs.extend(_generated_signs) + logger.info("Singature generation done.\n") + + logger.info("Start upload singature files to s3 bucket %s\n", bucket_name) + _failed_metas = client.upload_signatures( + meta_file_paths=generated_signs, + target=(bucket_name, prefix), + product=None, + root=target_dir + ) + failed_metas.extend(_failed_metas) + logger.info("Signature uploading done.\n") + # this step generates index.html for each dir and add them to file list # index is similar to metadata, it will be overwritten everytime if do_index: - logger.info("Start generating index files to s3 bucket %s", bucket_) + logger.info("Start generating index files to s3 bucket %s", bucket_name) created_indexes = indexing.generate_indexes( - PACKAGE_TYPE_NPM, target_dir, valid_dirs, client, bucket_, prefix__ + PACKAGE_TYPE_NPM, target_dir, valid_dirs, client, bucket_name, prefix ) logger.info("Index files generation done.\n") - logger.info("Start updating index files to s3 bucket %s", bucket_) + logger.info("Start updating index files to s3 bucket %s", bucket_name) _failed_metas = client.upload_metadatas( meta_file_paths=created_indexes, - target=(bucket_, prefix__), + target=(bucket_name, prefix), product=None, root=target_dir ) @@ -181,7 +216,7 @@ def handle_npm_uploading( else: logger.info("Bypass indexing\n") - upload_post_process(failed_files, failed_metas, product, bucket_) + upload_post_process(failed_files, failed_metas, product, bucket_name) succeeded = succeeded and len(failed_files) == 0 and len(failed_metas) == 0 return (target_dir, succeeded) @@ -190,7 +225,7 @@ def handle_npm_uploading( def handle_npm_del( tarball_path: str, product: str, - targets: List[Tuple[str, str, str, str]] = None, + 
buckets: List[Tuple[str, str, str, str]] = None, aws_profile=None, dir_=None, do_index=True, @@ -201,7 +236,7 @@ def handle_npm_del( * tarball_path is the location of the tarball in filesystem * product is used to identify which product this repo tar belongs to - * targets contains the target name with its bucket name and prefix + * buckets contains the target name with its bucket name and prefix for the bucket, which will be used to store artifacts with the prefix. See target definition in Charon configuration for details * dir is base dir for extracting the tarball, will use system @@ -217,19 +252,19 @@ def handle_npm_del( client = S3Client(aws_profile=aws_profile, dry_run=dry_run) succeeded = True - for target in targets: - bucket = target[1] - prefix_ = remove_prefix(target[2], "/") - logger.info("Start deleting files from s3 bucket %s", bucket) + for bucket in buckets: + bucket_name = bucket[1] + prefix = remove_prefix(bucket[2], "/") + logger.info("Start deleting files from s3 bucket %s", bucket_name) failed_files = client.delete_files( file_paths=valid_paths, - target=(bucket, prefix_), + target=(bucket_name, prefix), product=product, root=target_dir ) logger.info("Files deletion done\n") if manifest_bucket_name: - manifest_folder = target[1] + manifest_folder = bucket[1] logger.info( "Start deleting manifest from s3 bucket %s in folder %s", manifest_bucket_name, manifest_folder @@ -243,27 +278,27 @@ def handle_npm_del( logger.info( "Start generating package.json for package: %s in bucket %s", - package_name_path, bucket + package_name_path, bucket_name ) meta_files = _gen_npm_package_metadata_for_del( - client, bucket, target_dir, package_name_path, prefix_ + client, bucket_name, target_dir, package_name_path, prefix ) logger.info("package.json generation done\n") - logger.info("Start uploading package.json to s3 bucket %s", bucket) + logger.info("Start uploading package.json to s3 bucket %s", bucket_name) all_meta_files = [] for _, file in 
meta_files.items(): all_meta_files.append(file) client.delete_files( file_paths=all_meta_files, - target=(bucket, prefix_), + target=(bucket_name, prefix), product=None, root=target_dir ) failed_metas = [] if META_FILE_GEN_KEY in meta_files: _failed_metas = client.upload_metadatas( meta_file_paths=[meta_files[META_FILE_GEN_KEY]], - target=(bucket, prefix_), + target=(bucket_name, prefix), product=None, root=target_dir ) @@ -273,17 +308,17 @@ def handle_npm_del( if do_index: logger.info( "Start generating index files for all changed entries for bucket %s", - bucket + bucket_name ) created_indexes = indexing.generate_indexes( - PACKAGE_TYPE_NPM, target_dir, valid_dirs, client, bucket, prefix_ + PACKAGE_TYPE_NPM, target_dir, valid_dirs, client, bucket_name, prefix ) logger.info("Index files generation done.\n") - logger.info("Start updating index to s3 bucket %s", bucket) + logger.info("Start updating index to s3 bucket %s", bucket_name) _failed_index_files = client.upload_metadatas( meta_file_paths=created_indexes, - target=(bucket, prefix_), + target=(bucket_name, prefix), product=None, root=target_dir ) @@ -292,7 +327,7 @@ def handle_npm_del( else: logger.info("Bypassing indexing\n") - rollback_post_process(failed_files, failed_metas, product, bucket) + rollback_post_process(failed_files, failed_metas, product, bucket_name) succeeded = succeeded and len(failed_files) <= 0 and len(failed_metas) <= 0 return (target_dir, succeeded) @@ -533,3 +568,9 @@ def __get_path_tree(paths: str, prefix: str) -> Set[str]: if dir_.startswith("@"): valid_dirs.add(dir_.split("/")[0]) return valid_dirs + + +def __get_suffix(package_type: str, conf: CharonConfig) -> List[str]: + if package_type: + return conf.get_ignore_signature_suffix(package_type) + return [] diff --git a/charon/pkgs/signature.py b/charon/pkgs/signature.py new file mode 100644 index 00000000..412aeba8 --- /dev/null +++ b/charon/pkgs/signature.py @@ -0,0 +1,131 @@ +""" +Copyright (C) 2022 Red Hat, Inc. 
(https://github.com/Commonjava/charon) + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" + +import os +import subprocess +import asyncio +import logging +import shlex +from jinja2 import Template +from typing import Awaitable, Callable, List, Tuple +from charon.storage import S3Client + +logger = logging.getLogger(__name__) + + +def generate_sign( + package_type: str, + artifact_path: List[str], + top_level: str, + prefix: str, + s3_client: S3Client, + bucket: str, + key: str = None, + command: str = None +) -> Tuple[List[str], List[str]]: + """ This Python function generates a digital signature for a list of metadata files using + the GPG library for uploads to an Amazon S3 bucket. + + * Does not regenerate the existing metadata files when existing + * Returning all failed to generate signature files due to exceptions + * key: name of the sign key, using inside template to render correct command, + replace {{ key }} field in command string. + * command: A string representing the subprocess command to run. + + It returns a tuple containing two lists: one with the successfully generated files + and another with the failed to generate files due to exceptions. 
+ """ + + async def sign_file( + filename: str, failed_paths: List[str], generated_signs: List[str], + sem: asyncio.BoundedSemaphore + ): + async with sem: + signature_file = filename + ".asc" + if prefix: + remote = os.path.join(prefix, signature_file) + else: + remote = signature_file + local = os.path.join(top_level, signature_file) + artifact = os.path.join(top_level, filename) + + if not os.path.isfile(os.path.join(prefix, artifact)): + logger.warning("Artifact needs signature is missing, please check again") + return + + # skip sign if file already exist locally + if os.path.isfile(local): + logger.debug(".asc file %s existed, skipping", local) + return + # skip sign if file already exist in bucket + try: + existed = s3_client.file_exists_in_bucket(bucket, remote) + except ValueError as e: + logger.error( + "Error: Can not check signature file status due to: %s", e + ) + return + if existed: + logger.debug(".asc file %s existed, skipping", remote) + return + + run_command = Template(command).render(key=key, file=artifact) + result = await __run_cmd_async(shlex.split(run_command)) + + if result.returncode == 0: + generated_signs.append(local) + logger.debug("Generated signature file: %s", local) + else: + failed_paths.append(local) + + return __do_path_cut_and( + file_paths=artifact_path, + path_handler=sign_file, + root=top_level + ) + + +def __do_path_cut_and( + file_paths: List[str], + path_handler: Callable[[str, List[str], List[str], asyncio.Semaphore], Awaitable[bool]], + root="/" +) -> List[str]: + slash_root = root + if not root.endswith("/"): + slash_root = slash_root + "/" + failed_paths = [] + generated_signs = [] + tasks = [] + sem = asyncio.BoundedSemaphore(10) + for full_path in file_paths: + path = full_path + if path.startswith(slash_root): + path = path[len(slash_root):] + tasks.append( + asyncio.ensure_future( + path_handler(path, failed_paths, generated_signs, sem) + ) + ) + + loop = asyncio.get_event_loop() + 
loop.run_until_complete(asyncio.gather(*tasks)) + return (failed_paths, generated_signs) + + +async def __run_cmd_async(cmd): + loop = asyncio.get_event_loop() + result = await loop.run_in_executor(None, subprocess.run, cmd) + return result diff --git a/charon/schemas/charon.json b/charon/schemas/charon.json new file mode 100644 index 00000000..bf745f9a --- /dev/null +++ b/charon/schemas/charon.json @@ -0,0 +1,80 @@ +{ + "$schema": "https://json-schema.org/draft-07/schema#", + "title": "charon configuration", + "type": "object", + "properties": { + "ignore_patterns": { + "type": "array", + "description": "Filename patterns to ignore", + "minItems": 1, + "items": { + "type": "string", + "description": "pattern to ignore" + } + }, + "ignore_signature_suffix": { + "type": "object", + "patternProperties": { + "^[a-z].*$": { + "type": "array", + "description": "artifact type", + "minItems": 1, + "items": { + "type": "string", + "description": "sign artifact suffixt - exclude" + } + } + } + }, + "detach_signature_command": { + "type": "string", + "description": "signature command to be used for signature" + }, + "targets": { + "type": "object", + "patternProperties": { + "^[a-z].*$": { + "type": "array", + "description": "charon targets", + "minItems": 1, + "items": { + "type": "object", + "description": "target bucket", + "properties": { + "bucket": { + "description": "bucket name", + "type": "string" + }, + "prefix": { + "description": "prefix for destination path inside the bucket", + "type": "string" + }, + "registry": { + "description": "npm registry", + "type": "string" + } + }, + "required": [ + "bucket" + ], + "additionalProperties": false + } + } + } + }, + "aws_profile": { + "type": "string", + "description": "aws profile to use with S3" + }, + "manifest_bucket": { + "type": "string", + "description": "which bucket to use for storing manifests" + }, + "additionalProperties": false + }, + "additionalProperties": false, + "required": [ + "targets" + ] +} + diff 
--git a/charon/storage.py b/charon/storage.py index d6177d03..e040b5a6 100644 --- a/charon/storage.py +++ b/charon/storage.py @@ -15,7 +15,6 @@ """ import asyncio import threading -from boto3_type_annotations.s3.service_resource import Object from charon.utils.files import read_sha1 from charon.constants import PROD_INFO_SUFFIX, MANIFEST_SUFFIX @@ -24,7 +23,6 @@ from botocore.exceptions import HTTPClientError from boto3.exceptions import S3UploadFailedError from botocore.config import Config -from boto3_type_annotations import s3 from typing import Any, Awaitable, Callable, Dict, List, Optional, Tuple import os import logging @@ -57,15 +55,15 @@ def __init__( aws_profile=None, extra_conf=None, con_limit=25, dry_run=False ) -> None: - self.__client: s3.ServiceResource = self.__init_aws_client(aws_profile, extra_conf) - self.__buckets: Dict[str, s3.Bucket] = {} + self.__client = self.__init_aws_client(aws_profile, extra_conf) + self.__buckets: Dict[str, Any] = {} self.__dry_run = dry_run self.__con_sem = asyncio.BoundedSemaphore(con_limit) self.__lock = threading.Lock() def __init_aws_client( self, aws_profile=None, extra_conf=None - ) -> s3.ServiceResource: + ): if aws_profile: logger.debug("Using aws profile: %s", aws_profile) s3_session = session.Session(profile_name=aws_profile) @@ -130,7 +128,7 @@ def upload_files( main_bucket = self.__get_bucket(main_bucket_name) key_prefix = main_target[1] extra_targets = targets[1:] if len(targets) > 1 else [] - extra_prefixed_buckets: List[Tuple[s3.Bucket, str]] = [] + extra_prefixed_buckets: List[Tuple[Any, str]] = [] if len(extra_targets) > 0: for target in extra_targets: extra_prefixed_buckets.append((self.__get_bucket(target[0]), target[1])) @@ -153,7 +151,7 @@ async def path_upload_handler( index, total, full_file_path, main_bucket_name ) main_path_key = os.path.join(key_prefix, path) if key_prefix else path - main_file_object: s3.Object = main_bucket.Object(main_path_key) + main_file_object = 
main_bucket.Object(main_path_key) existed = False try: existed = await self.__run_async(self.__file_exists, main_file_object) @@ -218,7 +216,7 @@ async def path_upload_handler( 'Copyinging %s from bucket %s to bucket %s', full_file_path, main_bucket_name, extra_bucket ) - file_object: s3.Object = extra_bucket.Object(extra_path_key) + file_object = extra_bucket.Object(extra_path_key) existed = await self.__run_async(self.__file_exists, file_object) if not existed: if not self.__dry_run: @@ -285,7 +283,7 @@ async def handle_existed( async def __copy_between_bucket( self, source: str, source_key: str, - target: s3.Bucket, target_key: str + target, target_key: str ) -> bool: logger.debug( "Copying file %s from bucket %s to target %s as %s", @@ -345,7 +343,7 @@ async def path_upload_handler( key_prefix = target[1] path_key = os.path.join(key_prefix, path) if key_prefix else path - file_object: s3.Object = bucket.Object(path_key) + file_object = bucket.Object(path_key) existed = False try: existed = await self.__run_async(self.__file_exists, file_object) @@ -416,6 +414,100 @@ async def path_upload_handler( root=root ) + def upload_signatures( + self, meta_file_paths: List[str], + target: Tuple[str, str], + product: Optional[str] = None, root="/" + ) -> List[str]: + """ Upload a list of signature files to s3 bucket. This function is very similar to + upload_metadata, except: + * The signature files will not be overwritten if existed + """ + bucket_name = target[0] + bucket = self.__get_bucket(bucket_name) + + async def path_upload_handler( + full_file_path: str, path: str, index: int, + total: int, failed: List[str] + ): + async with self.__con_sem: + if not os.path.isfile(full_file_path): + logger.warning( + 'Warning: file %s does not exist during uploading. 
Product: %s', + full_file_path, product + ) + failed.append(full_file_path) + return + + logger.debug( + '(%d/%d) Updating sginature %s to bucket %s', + index, total, path, bucket_name + ) + + key_prefix = target[1] + path_key = os.path.join(key_prefix, path) if key_prefix else path + file_object = bucket.Object(path_key) + existed = False + try: + existed = await self.__run_async(self.__file_exists, file_object) + except (ClientError, HTTPClientError) as e: + logger.error( + "Error: file existence check failed due to error: %s", e + ) + failed.append(full_file_path) + return + (content_type, _) = mimetypes.guess_type(full_file_path) + if not content_type: + content_type = DEFAULT_MIME_TYPE + + try: + if not self.__dry_run: + if not existed: + await self.__run_async( + functools.partial( + file_object.put, + Body=open(full_file_path, "rb"), + Metadata={}, + ContentType=content_type + ) + ) + elif product: + # NOTE: This should not happen for most cases, as most + # of the metadata file does not have product info. 
Just + # leave for requirement change in future + # This is now used for npm version-level package.json + prods = [product] + if existed: + (prods, no_error) = await self.__run_async( + self.__get_prod_info, + path_key, bucket_name + ) + if not no_error: + failed.append(full_file_path) + return + if no_error and product not in prods: + prods.append(product) + updated = await self.__update_prod_info( + path_key, bucket_name, prods + ) + if not updated: + failed.append(full_file_path) + return + logger.debug('Updated signature %s to bucket %s', path, bucket_name) + except (ClientError, HTTPClientError) as e: + logger.error( + "ERROR: file %s not uploaded to bucket" + " %s due to error: %s ", + full_file_path, bucket_name, e + ) + failed.append(full_file_path) + + return self.__do_path_cut_and( + file_paths=meta_file_paths, + path_handler=self.__path_handler_count_wrapper(path_upload_handler), + root=root + ) + def upload_manifest( self, manifest_name: str, manifest_full_path: str, target: str, manifest_bucket_name: str @@ -424,7 +516,7 @@ def upload_manifest( path_key = os.path.join(target, manifest_name) manifest_bucket = self.__get_bucket(manifest_bucket_name) try: - file_object: s3.Object = manifest_bucket.Object(path_key) + file_object = manifest_bucket.Object(path_key) file_object.upload_file( Filename=manifest_full_path, ExtraArgs={'ContentType': DEFAULT_MIME_TYPE} @@ -556,7 +648,7 @@ def delete_manifest(self, product_key: str, target: str, manifest_bucket_name: s path_key = os.path.join(target, manifest_name) manifest_bucket = self.__get_bucket(manifest_bucket_name) - file_object: s3.Object = manifest_bucket.Object(path_key) + file_object = manifest_bucket.Object(path_key) existed = False try: existed = self.__file_exists(file_object) @@ -641,7 +733,7 @@ def file_exists_in_bucket( file_object = bucket.Object(path) return self.__file_exists(file_object) - def __get_bucket(self, bucket_name: str) -> s3.Bucket: + def __get_bucket(self, bucket_name: str): 
self.__lock.acquire() try: bucket = self.__buckets.get(bucket_name) @@ -654,7 +746,7 @@ def __get_bucket(self, bucket_name: str) -> s3.Bucket: finally: self.__lock.release() - def __file_exists(self, file_object: Object) -> bool: + def __file_exists(self, file_object) -> bool: try: file_object.load() return True diff --git a/charon/utils/yaml.py b/charon/utils/yaml.py new file mode 100644 index 00000000..ee9b4a98 --- /dev/null +++ b/charon/utils/yaml.py @@ -0,0 +1,90 @@ +""" +Copyright (C) 2022 Red Hat, Inc. (https://github.com/Commonjava/charon) + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+""" +import codecs +import json +import logging + +import jsonschema +import yaml +from pkg_resources import resource_stream + +logger = logging.getLogger(__name__) + + +def read_yaml_from_file_path(file_path, schema, package='charon'): + """ + :param file_path: string, yaml file to read + :param schema: string, file path to the JSON schema + :param package: string, package name containing the schema + """ + with open(file_path) as f: + yaml_data = f.read() + return read_yaml(yaml_data, schema, package) + + +def read_yaml(yaml_data, schema, package=None): + """ + :param yaml_data: string, yaml content + :param schema: string, file path to the JSON schema + :param package: string, package name containing the schema + """ + package = package or 'charon' + data = yaml.safe_load(yaml_data) + schema = load_schema(package, schema) + validate_with_schema(data, schema) + return data + + +def load_schema(package, schema): + """ + :param package: string, package name containing the schema + :param schema: string, file path to the JSON schema + """ + # Read schema from file + try: + resource = resource_stream(package, schema) + schema = codecs.getreader('utf-8')(resource) + except ImportError: + logger.error('Unable to find package %s', package) + raise + except (IOError, TypeError): + logger.error('unable to extract JSON schema, cannot validate') + raise + + # Load schema into Dict + try: + schema = json.load(schema) + except ValueError: + logger.error('unable to decode JSON schema, cannot validate') + raise + return schema + + +def validate_with_schema(data, schema): + """ + :param data: dict, data to be validated + :param schema: dict, schema to validate with + """ + validator = jsonschema.Draft7Validator(schema=schema) + try: + jsonschema.Draft7Validator.check_schema(schema) + validator.validate(data) + except jsonschema.SchemaError: + logger.error('invalid schema, cannot validate') + raise + except jsonschema.ValidationError as exc: + logger.error("schema validation 
error: %s", exc) + raise diff --git a/config/charon.yaml b/config/charon.yaml new file mode 100644 index 00000000..97c37d4a --- /dev/null +++ b/config/charon.yaml @@ -0,0 +1,32 @@ +ignore_patterns: + - ".*^(redhat).*" + - ".*snapshot.*" + +ignore_signature_suffix: + maven: + - ".sha1" + - ".sha256" + - ".md5" + - "maven-metadata.xml" + - "archtype-catalog.xml" + npm: + - "package.json" + +detach_signature_command: "rpm-sign --detach-sign --key {{ key }} {{ file }}" + +targets: + stage-ga: + - bucket: "stage-maven-ga" + prefix: ga + stage-ea: + - bucket: "stage-maven-ea" + prefix: earlyaccess/all + stage-maven: # collection of stage-ea and stage-ga + - bucket: "stage-maven-ga" + prefix: ga + - bucket: "stage-maven-ea" + prefix: earlyaccess/all + stage-npm: + - bucket: "stage-npm-npmjs" + prefix: / + registry: "npm.stage.registry.redhat.com" diff --git a/config/charon.yml b/config/charon.yml deleted file mode 100644 index ab3bb6e4..00000000 --- a/config/charon.yml +++ /dev/null @@ -1,14 +0,0 @@ -ignore_patterns: - - ".*^(redhat).*" - - ".*snapshot.*" - -targets: - ga: - bucket: "maven-prod-ga" - prefix: ga - ea: - bucket: "maven-prod-ea" - prefix: earlyaccess/all - npm: - bucket: "npm-prod" - prefix: npmjs diff --git a/pytest.ini b/pytest.ini deleted file mode 100644 index fe70ffdf..00000000 --- a/pytest.ini +++ /dev/null @@ -1,3 +0,0 @@ -[pytest] -addopts = -ra --color=auto --html=__pytest_reports/atomic-reactor-unit-tests.html --self-contained-html -render_collapsed = True diff --git a/requirements.txt b/requirements.txt index e63a211a..043083ce 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,9 +2,9 @@ setuptools-rust==1.1.2 jinja2==3.0.3 boto3==1.20.45 botocore==1.23.45 -boto3_type_annotations==0.3.1 click==8.0.3 requests==2.27.1 -ruamel.yaml==0.17.20 +PyYAML==6.0 defusedxml==0.7.1 subresource-integrity==0.2 +jsonschema==3.2.0 diff --git a/setup.py b/setup.py index f26e196e..c9301386 100755 --- a/setup.py +++ b/setup.py @@ -20,7 +20,7 @@ from setuptools 
import setup, find_packages -version = "1.1.1" +version = "1.1.2" # f = open('README.md') # long_description = f.read().strip() @@ -63,6 +63,7 @@ def _get_requirements(path): license="APLv2", packages=find_packages(exclude=["ez_setup", "examples", "tests"]), install_requires=_get_requirements('requirements.txt'), + package_data={'charon': ['schemas/*.json']}, test_suite="tests", entry_points={ "console_scripts": ["charon = charon:cli"], diff --git a/test-coverage.sh b/test-coverage.sh deleted file mode 100755 index 050feacd..00000000 --- a/test-coverage.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/sh - -pytest --cov charon -v --cov-report html diff --git a/test.sh b/test.sh deleted file mode 100755 index a359cfb7..00000000 --- a/test.sh +++ /dev/null @@ -1,109 +0,0 @@ -#!/bin/bash -set -eux - -# Prepare env vars -ENGINE=${ENGINE:="podman"} -OS=${OS:="centos"} -OS_VERSION=${OS_VERSION:="7"} -PYTHON_VERSION=${PYTHON_VERSION:="3"} -ACTION=${ACTION:="test"} -IMAGE="$OS:$OS_VERSION" -CONTAINER_NAME="charon-$OS-$OS_VERSION-py$PYTHON_VERSION" - -# Use arrays to prevent globbing and word splitting -engine_mounts=(-v "$PWD":"$PWD":z) -for dir in ${EXTRA_MOUNT:-}; do - engine_mounts=("${engine_mounts[@]}" -v "$dir":"$dir":z) -done - -# Create or resurrect container if needed -if [[ $($ENGINE ps -qa -f name="$CONTAINER_NAME" | wc -l) -eq 0 ]]; then - $ENGINE run --name "$CONTAINER_NAME" -d "${engine_mounts[@]}" -w "$PWD" -ti "$IMAGE" sleep infinity -elif [[ $($ENGINE ps -q -f name="$CONTAINER_NAME" | wc -l) -eq 0 ]]; then - echo found stopped existing container, restarting. volume mounts cannot be updated. 
- $ENGINE container start "$CONTAINER_NAME" -fi - -function setup_charon() { - RUN="$ENGINE exec -i $CONTAINER_NAME" - PYTHON="python$PYTHON_VERSION" - PIP_PKG="$PYTHON-pip" - PIP="pip$PYTHON_VERSION" - PKG="dnf" - PKG_EXTRA=(dnf-plugins-core git "$PYTHON"-pylint) - BUILDDEP=(dnf builddep) - if [[ $OS == "centos" ]]; then - PKG="yum" - PKG_EXTRA=(yum-utils git "$PYTHON"-pylint) - BUILDDEP=(yum-builddep) - ENABLE_REPO= - else - ENABLE_REPO="--enablerepo=updates-testing" - fi - - - PIP_INST=("$PIP" install --index-url "${PYPI_INDEX:-https://pypi.org/simple}") - - if [[ $OS == "centos" ]]; then - # Don't let builddep enable *-source repos since they give 404 errors - $RUN rm -f /etc/yum.repos.d/CentOS-Sources.repo - # $RUN rm -f /etc/yum.repos.d/CentOS-Linux-AppStream.repo - # $RUN rm -f /etc/yum.repos.d/CentOS-Linux-BaseOS.repo - # This has to run *before* we try installing anything from EPEL - $RUN $PKG $ENABLE_REPO install -y epel-release - fi - - # RPM install basic dependencies - $RUN $PKG $ENABLE_REPO install -y "${PKG_EXTRA[@]}" - # RPM install build dependencies for charon - $RUN "${BUILDDEP[@]}" -y charon.spec - - # Install package - $RUN $PKG install -y $PIP_PKG - - # Upgrade pip to provide latest features for successful installation - $RUN "${PIP_INST[@]}" --upgrade pip - - if [[ $OS == centos ]]; then - # Pip install/upgrade setuptools. 
Older versions of setuptools don't understand the - # environment markers used by docker-squash's requirements, also - # CentOS needs to have setuptools updates to make pytest-cov work - $RUN "${PIP_INST[@]}" --upgrade setuptools - fi - - # install with RPM_PY_SYS=true to avoid error caused by installing on system python - #$RUN sh -c "RPM_PY_SYS=true ${PIP_INST[*]} rpm-py-installer" - # Setuptools install charon from source - $RUN $PYTHON setup.py install - - # Pip install packages for unit tests - $RUN "${PIP_INST[@]}" -r tests/requirements.txt -} - -case ${ACTION} in -"test") - setup_charon - TEST_CMD="coverage run --source=charon -m pytest tests" - ;; -"pylint") - setup_charon - PACKAGES='charon tests' - TEST_CMD="${PYTHON} -m pylint ${PACKAGES}" - ;; -"bandit") - setup_charon - $RUN "${PIP_INST[@]}" bandit - TEST_CMD="bandit-baseline -r charon -ll -ii" - ;; -*) - echo "Unknown action: ${ACTION}" - exit 2 - ;; -esac - -# Run tests -# shellcheck disable=SC2086 -$RUN ${TEST_CMD} "$@" - -echo "To run tests again:" -echo "$RUN ${TEST_CMD}" diff --git a/tests/base.py b/tests/base.py index 4e89c9ac..49cd2f1e 100644 --- a/tests/base.py +++ b/tests/base.py @@ -25,9 +25,9 @@ from charon.pkgs.pkg_utils import is_metadata from charon.storage import PRODUCT_META_KEY, CHECKSUM_META_KEY from tests.commons import TEST_BUCKET, TEST_MANIFEST_BUCKET -from boto3_type_annotations import s3 from moto import mock_s3 +from tests.constants import HERE SHORT_TEST_PREFIX = "ga" LONG_TEST_PREFIX = "earlyaccess/all" @@ -43,18 +43,32 @@ def setUp(self): - ".*^(redhat).*" - ".*snapshot.*" +ignore_signature_suffix: + maven: + - ".sha1" + - ".sha256" + - ".md5" + - "maven-metadata.xml" + - "archtype-catalog.xml" + npm: + - "package.json" + +detach_signature_command: "touch {{ file }}.asc" + targets: ga: - bucket: "charon-test" - prefix: ga + - bucket: "charon-test" + prefix: ga ea: - bucket: "charon-test-ea" - prefix: earlyaccess/all + - bucket: "charon-test-ea" + prefix: earlyaccess/all npm: 
- bucket: "charon-test-npm" - registry: "npm1.registry.redhat.com" - """ + - bucket: "charon-test-npm" + registry: "npm1.registry.redhat.com" +aws_profile: "test" +manifest_bucket: "manifest" + """ self.prepare_config(config_base, default_config_content) def tearDown(self): @@ -70,7 +84,7 @@ def change_home(self): def __prepare_template(self, config_base): template_path = os.path.join(config_base, 'template') os.mkdir(config_base) - shutil.copytree(os.path.join(os.getcwd(), "template"), template_path) + shutil.copytree(os.path.join(HERE, "../template"), template_path) if not os.path.isdir(template_path): self.fail("Template initilization failed!") @@ -115,7 +129,7 @@ def cleanBuckets(self, buckets: List[str]): def __prepare_s3(self): return boto3.resource('s3') - def check_product(self, file: str, prods: List[str], bucket: s3.Bucket = None, msg=None): + def check_product(self, file: str, prods: List[str], bucket=None, msg=None): prod_file = file + PROD_INFO_SUFFIX test_bucket = bucket if not test_bucket: @@ -128,7 +142,7 @@ def check_product(self, file: str, prods: List[str], bucket: s3.Bucket = None, m msg=msg ) - def check_content(self, objs: List[s3.ObjectSummary], products: List[str], msg=None): + def check_content(self, objs: List, products: List[str], msg=None): for obj in objs: file_obj = obj.Object() test_bucket = self.mock_s3.Bucket(file_obj.bucket_name) diff --git a/tests/commons.py b/tests/commons.py index 2e5f670b..fdb3ae8b 100644 --- a/tests/commons.py +++ b/tests/commons.py @@ -89,6 +89,19 @@ COMMONS_CLIENT_456_INDEX = "org/apache/httpcomponents/httpclient/4.5.6/index.html" COMMONS_LOGGING_INDEX = "commons-logging/commons-logging/index.html" COMMONS_ROOT_INDEX = "index.html" +COMMONS_LOGGING_SIGNS = [ + "commons-logging/commons-logging/1.2/commons-logging-1.2.jar.asc", + "commons-logging/commons-logging/1.2/commons-logging-1.2-sources.jar.asc", + "commons-logging/commons-logging/1.2/commons-logging-1.2.pom.asc", +] +COMMONS_CLIENT_456_SIGNS = [ + 
"org/apache/httpcomponents/httpclient/4.5.6/httpclient-4.5.6.jar.asc", + "org/apache/httpcomponents/httpclient/4.5.6/httpclient-4.5.6.pom.asc", +] +COMMONS_CLIENT_459_SIGNS = [ + "org/apache/httpcomponents/httpclient/4.5.9/httpclient-4.5.9.jar.asc", + "org/apache/httpcomponents/httpclient/4.5.9/httpclient-4.5.9.pom.asc", +] # For npm diff --git a/tests/constants.py b/tests/constants.py new file mode 100644 index 00000000..2e6d111f --- /dev/null +++ b/tests/constants.py @@ -0,0 +1,19 @@ +""" +Copyright (C) 2022 Red Hat, Inc. (https://github.com/Commonjava/charon) + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+""" +import os + +HERE = os.path.dirname(__file__) +INPUTS = os.path.join(HERE, 'input') diff --git a/tests/requirements.txt b/tests/requirements.txt index de68f7b1..af22ba64 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -1,9 +1,9 @@ flexmock>=0.10.6 responses>=0.9.0,<0.10.8 -pytest>=5.0 +pytest<=7.1.3 pytest-cov pytest-html flake8 requests-mock moto==3.0.2.dev12 - +python-gnupg==0.5.0 diff --git a/tests/test_archive.py b/tests/test_archive.py index 9e303028..0e2ac09a 100644 --- a/tests/test_archive.py +++ b/tests/test_archive.py @@ -2,12 +2,14 @@ from charon.utils.archive import NpmArchiveType, detect_npm_archive import os +from tests.constants import INPUTS + class ArchiveTest(BaseTest): def test_detect_package(self): - mvn_tarball = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + mvn_tarball = os.path.join(INPUTS, "commons-client-4.5.6.zip") self.assertEqual(NpmArchiveType.NOT_NPM, detect_npm_archive(mvn_tarball)) - npm_tarball = os.path.join(os.getcwd(), "tests/input/code-frame-7.14.5.tgz") + npm_tarball = os.path.join(INPUTS, "code-frame-7.14.5.tgz") self.assertEqual(NpmArchiveType.TAR_FILE, detect_npm_archive(npm_tarball)) def test_download_archive(self): diff --git a/tests/test_config.py b/tests/test_config.py index 41ce25ad..25bd4649 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -16,8 +16,14 @@ from typing import List import unittest import os + +import pytest +from jsonschema.exceptions import ValidationError + import charon.config as config import re + +from charon.constants import DEFAULT_REGISTRY from tests.base import BaseTest @@ -32,16 +38,16 @@ def test_config(self): self.__base.setUp() conf = config.get_config() self.assertEqual([".*^(redhat).*", ".*snapshot.*"], conf.get_ignore_patterns()) - self.assertEqual('charon-test', conf.get_aws_bucket("ga")) - self.assertEqual('ga', conf.get_bucket_prefix("ga")) - self.assertEqual('charon-test-ea', conf.get_aws_bucket("ea")) - 
self.assertEqual('earlyaccess/all', conf.get_bucket_prefix("ea")) - self.assertEqual('npm1.registry.redhat.com', conf.get_bucket_registry("npm")) + self.assertEqual([{'bucket': 'charon-test', 'prefix': 'ga'}], conf.get_target('ga')) + self.assertEqual([{'bucket': 'charon-test-ea', 'prefix': 'earlyaccess/all'}], + conf.get_target('ea')) + self.assertEqual([{'bucket': 'charon-test-npm', 'registry': 'npm1.registry.redhat.com'}], + conf.get_target('npm')) def test_no_config(self): self.__base.change_home() - conf = config.get_config() - self.assertIsNone(conf) + with pytest.raises(FileNotFoundError): + config.get_config() def test_config_missing_targets(self): content_missing_targets = """ @@ -50,8 +56,9 @@ def test_config_missing_targets(self): - ".*snapshot.*" """ self.__change_config_content(content_missing_targets) - conf = config.get_config() - self.assertIsNone(conf) + msg = "'targets' is a required property" + with pytest.raises(ValidationError, match=msg): + config.get_config() def test_config_missing_bucket(self): content_missing_targets = """ @@ -61,13 +68,12 @@ def test_config_missing_bucket(self): targets: ga: - prefix: ga + - prefix: ga """ self.__change_config_content(content_missing_targets) - conf = config.get_config() - self.assertIsNotNone(conf) - self.assertEqual("ga", conf.get_bucket_prefix("ga")) - self.assertIsNone(conf.get_aws_bucket("ga")) + msg = "'bucket' is a required property" + with pytest.raises(ValidationError, match=msg): + config.get_config() def test_config_missing_prefix(self): content_missing_targets = """ @@ -77,13 +83,13 @@ def test_config_missing_prefix(self): targets: ga: - bucket: charon-test + - bucket: charon-test """ self.__change_config_content(content_missing_targets) conf = config.get_config() self.assertIsNotNone(conf) - self.assertEqual("charon-test", conf.get_aws_bucket("ga")) - self.assertEqual("", conf.get_bucket_prefix("ga")) + self.assertEqual("charon-test", conf.get_target("ga")[0].get('bucket', '')) + 
self.assertEqual("", conf.get_target("ga")[0].get('prefix', '')) def test_config_missing_registry(self): content_missing_registry = """ @@ -93,13 +99,13 @@ def test_config_missing_registry(self): targets: npm: - bucket: charon-npm-test + - bucket: charon-npm-test """ self.__change_config_content(content_missing_registry) conf = config.get_config() self.assertIsNotNone(conf) - self.assertEqual("charon-npm-test", conf.get_aws_bucket("npm")) - self.assertEqual("localhost", conf.get_bucket_registry("npm")) + self.assertEqual("charon-npm-test", conf.get_target("npm")[0].get('bucket', '')) + self.assertEqual("localhost", conf.get_target("npm")[0].get('registry', DEFAULT_REGISTRY)) def test_ignore_patterns(self): # pylint: disable=anomalous-backslash-in-string @@ -113,7 +119,7 @@ def test_ignore_patterns(self): targets: ga: - bucket: charon-test + - bucket: charon-test """ self.__change_config_content(content_missing_targets) conf = config.get_config() diff --git a/tests/test_manifest_del.py b/tests/test_manifest_del.py index bea22072..fc5ff35c 100644 --- a/tests/test_manifest_del.py +++ b/tests/test_manifest_del.py @@ -25,6 +25,7 @@ TEST_BUCKET, TEST_MANIFEST_BUCKET, TEST_TARGET, COMMONS_CLIENT_456_MANIFEST, CODE_FRAME_7_14_5_MANIFEST ) +from tests.constants import INPUTS @mock_s3 @@ -38,11 +39,11 @@ def test_maven_manifest_delete(self): self.assertEqual(1, len(manifests)) self.assertIn(COMMONS_CLIENT_456_MANIFEST, manifests) - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product = "commons-client-4.5.6" handle_maven_del( test_zip, product, - targets=[(TEST_TARGET, TEST_BUCKET, None, None)], + buckets=[(TEST_TARGET, TEST_BUCKET, '', '')], dir_=self.tempdir, do_index=False, manifest_bucket_name=TEST_MANIFEST_BUCKET @@ -59,11 +60,11 @@ def test_npm_manifest_delete(self): self.assertEqual(1, len(manifests)) self.assertIn(CODE_FRAME_7_14_5_MANIFEST, manifests) - test_tgz = 
os.path.join(os.getcwd(), "tests/input/code-frame-7.14.5.tgz") + test_tgz = os.path.join(INPUTS, "code-frame-7.14.5.tgz") product = "code-frame-7.14.5" handle_npm_del( test_tgz, product, - targets=[(TEST_TARGET, TEST_BUCKET, None, None)], + buckets=[(TEST_TARGET, TEST_BUCKET, '', '')], dir_=self.tempdir, do_index=False, manifest_bucket_name=TEST_MANIFEST_BUCKET @@ -73,22 +74,22 @@ def test_npm_manifest_delete(self): self.assertEqual(0, len(manifests)) def __prepare_maven_content(self): - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product = "commons-client-4.5.6" handle_maven_uploading( test_zip, product, - targets=[(TEST_TARGET, TEST_BUCKET, None, None)], + buckets=[(TEST_TARGET, TEST_BUCKET, '', '')], dir_=self.tempdir, do_index=False, manifest_bucket_name=TEST_MANIFEST_BUCKET ) def __prepare_npm_content(self): - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.14.5.tgz") + test_tgz = os.path.join(INPUTS, "code-frame-7.14.5.tgz") product = "code-frame-7.14.5" handle_npm_uploading( test_tgz, product, - targets=[(TEST_TARGET, TEST_BUCKET, None, DEFAULT_REGISTRY)], + buckets=[(TEST_TARGET, TEST_BUCKET, '', DEFAULT_REGISTRY)], dir_=self.tempdir, do_index=False, manifest_bucket_name=TEST_MANIFEST_BUCKET diff --git a/tests/test_manifest_upload.py b/tests/test_manifest_upload.py index 0f7251dd..e6aa43e9 100644 --- a/tests/test_manifest_upload.py +++ b/tests/test_manifest_upload.py @@ -26,17 +26,18 @@ COMMONS_CLIENT_META_NUM, COMMONS_CLIENT_456_MANIFEST, COMMONS_CLIENT_456_FILES, COMMONS_LOGGING_FILES, CODE_FRAME_7_14_5_MANIFEST, CODE_FRAME_7_14_5_FILES ) +from tests.constants import INPUTS @mock_s3 class ManifestUploadTest(PackageBaseTest): def test_maven_manifest_upload(self): - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product = "commons-client-4.5.6" 
handle_maven_uploading( test_zip, product, - targets=[(TEST_TARGET, TEST_BUCKET, None, None)], + buckets=[(TEST_TARGET, TEST_BUCKET, '', '')], dir_=self.tempdir, do_index=False, manifest_bucket_name=TEST_MANIFEST_BUCKET @@ -62,11 +63,11 @@ def test_maven_manifest_upload(self): self.assertIn(f, manifest_content) def test_npm_manifest_upload(self): - test_zip = os.path.join(os.getcwd(), "tests/input/code-frame-7.14.5.tgz") + test_zip = os.path.join(INPUTS, "code-frame-7.14.5.tgz") product = "code-frame-7.14.5" handle_npm_uploading( test_zip, product, - targets=[(TEST_TARGET, TEST_BUCKET, None, DEFAULT_REGISTRY)], + buckets=[(TEST_TARGET, TEST_BUCKET, '', DEFAULT_REGISTRY)], dir_=self.tempdir, do_index=False, manifest_bucket_name=TEST_MANIFEST_BUCKET diff --git a/tests/test_maven_del.py b/tests/test_maven_del.py index 712132ff..c26e6d4a 100644 --- a/tests/test_maven_del.py +++ b/tests/test_maven_del.py @@ -27,6 +27,8 @@ from moto import mock_s3 import os +from tests.constants import INPUTS + @mock_s3 class MavenDeleteTest(PackageBaseTest): @@ -48,12 +50,12 @@ def test_ignore_del(self): product_459 = "commons-client-4.5.9" product_mix = [product_456, product_459] - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") handle_maven_del( test_zip, product_456, ignore_patterns=[".*.sha1"], - targets=[(None, TEST_BUCKET, None, None)], + buckets=[('', TEST_BUCKET, '', '')], dir_=self.tempdir, do_index=False ) @@ -98,11 +100,11 @@ def test_ignore_del(self): def __test_prefix_deletion(self, prefix: str): self.__prepare_content(prefix) - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product_456 = "commons-client-4.5.6" handle_maven_del( test_zip, product_456, - targets=[(None, TEST_BUCKET, prefix, None)], + buckets=[('', TEST_BUCKET, prefix, '')], dir_=self.tempdir, do_index=False ) @@ -173,10 +175,10 
@@ def __test_prefix_deletion(self, prefix: str): self.assertIn("1.2", meta_content_logging) self.assertIn("1.2", meta_content_logging) - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.9.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.9.zip") handle_maven_del( test_zip, product_459, - targets=[(None, TEST_BUCKET, prefix, None)], + buckets=[('', TEST_BUCKET, prefix, '')], dir_=self.tempdir, do_index=False ) @@ -185,20 +187,20 @@ def __test_prefix_deletion(self, prefix: str): self.assertEqual(0, len(objs)) def __prepare_content(self, prefix=None): - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product_456 = "commons-client-4.5.6" handle_maven_uploading( test_zip, product_456, - targets=[(None, TEST_BUCKET, prefix, None)], + buckets=[('', TEST_BUCKET, prefix, '')], dir_=self.tempdir, do_index=False ) - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.9.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.9.zip") product_459 = "commons-client-4.5.9" handle_maven_uploading( test_zip, product_459, - targets=[(None, TEST_BUCKET, prefix, None)], + buckets=[('', TEST_BUCKET, prefix, '')], dir_=self.tempdir, do_index=False ) diff --git a/tests/test_maven_del_multi_tgts.py b/tests/test_maven_del_multi_tgts.py index abf324bb..ffc60954 100644 --- a/tests/test_maven_del_multi_tgts.py +++ b/tests/test_maven_del_multi_tgts.py @@ -27,6 +27,8 @@ from moto import mock_s3 import os +from tests.constants import INPUTS + @mock_s3 class MavenDeleteMultiTgtsTest(PackageBaseTest): @@ -58,12 +60,12 @@ def test_ignore_del(self): product_459 = "commons-client-4.5.9" product_mix = [product_456, product_459] - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") handle_maven_del( test_zip, product_456, ignore_patterns=[".*.sha1"], - targets=[(None, 
TEST_BUCKET, None, None)], + buckets=[('', TEST_BUCKET, '', '')], dir_=self.tempdir, do_index=False ) @@ -108,12 +110,12 @@ def test_ignore_del(self): def __test_prefix_deletion(self, prefix: str): self.__prepare_content(prefix) - targets_ = [(None, TEST_BUCKET, prefix, None), (None, TEST_BUCKET_2, prefix, None)] - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + targets_ = [('', TEST_BUCKET, prefix, ''), ('', TEST_BUCKET_2, prefix, '')] + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product_456 = "commons-client-4.5.6" handle_maven_del( test_zip, product_456, - targets=targets_, + buckets=targets_, dir_=self.tempdir, do_index=False ) @@ -238,10 +240,10 @@ def __test_prefix_deletion(self, prefix: str): msg=f'{bucket_name}' ) - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.9.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.9.zip") handle_maven_del( test_zip, product_459, - targets=targets_, + buckets=targets_, dir_=self.tempdir, do_index=False ) @@ -253,21 +255,21 @@ def __test_prefix_deletion(self, prefix: str): self.assertEqual(0, len(objs), msg=f'{bucket_name}') def __prepare_content(self, prefix=None): - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product_456 = "commons-client-4.5.6" - targets_ = [(None, TEST_BUCKET, prefix, None), (None, TEST_BUCKET_2, prefix, None)] + targets_ = [('', TEST_BUCKET, prefix, ''), ('', TEST_BUCKET_2, prefix, '')] handle_maven_uploading( test_zip, product_456, - targets=targets_, + buckets=targets_, dir_=self.tempdir, do_index=False ) - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.9.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.9.zip") product_459 = "commons-client-4.5.9" handle_maven_uploading( test_zip, product_459, - targets=targets_, + buckets=targets_, dir_=self.tempdir, do_index=False ) diff --git 
a/tests/test_maven_index.py b/tests/test_maven_index.py index bc8ccb9d..d5647ecd 100644 --- a/tests/test_maven_index.py +++ b/tests/test_maven_index.py @@ -26,16 +26,18 @@ from moto import mock_s3 import os +from tests.constants import INPUTS + @mock_s3 class MavenFileIndexTest(PackageBaseTest): def test_uploading_index(self): - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product = "commons-client-4.5.6" handle_maven_uploading( test_zip, product, - targets=[(None, TEST_BUCKET, None, None)], + buckets=[('', TEST_BUCKET, '', '')], dir_=self.tempdir ) @@ -75,19 +77,19 @@ def test_uploading_index(self): self.assertNotIn(PROD_INFO_SUFFIX, index_content) def test_overlap_upload_index(self): - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product_456 = "commons-client-4.5.6" handle_maven_uploading( test_zip, product_456, - targets=[(None, TEST_BUCKET, None, None)], + buckets=[('', TEST_BUCKET, '', '')], dir_=self.tempdir ) - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.9.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.9.zip") product_459 = "commons-client-4.5.9" handle_maven_uploading( test_zip, product_459, - targets=[(None, TEST_BUCKET, None, None)], + buckets=[('', TEST_BUCKET, '', '')], dir_=self.tempdir ) @@ -135,11 +137,11 @@ def test_upload_index_with_root_prefix(self): self.__test_upload_index_with_prefix("/") def __test_upload_index_with_prefix(self, prefix: str): - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product = "commons-client-4.5.6" handle_maven_uploading( test_zip, product, - targets=[(None, TEST_BUCKET, prefix, None)], + buckets=[('', TEST_BUCKET, prefix, '')], dir_=self.tempdir ) @@ -187,11 +189,11 @@ def 
__test_upload_index_with_prefix(self, prefix: str): def test_deletion_index(self): self.__prepare_content() - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product_456 = "commons-client-4.5.6" handle_maven_del( test_zip, product_456, - targets=[(None, TEST_BUCKET, None, None)], + buckets=[('', TEST_BUCKET, '', '')], dir_=self.tempdir ) @@ -236,10 +238,10 @@ def test_deletion_index(self): self.assertNotIn(PROD_INFO_SUFFIX, index_content) product_459 = "commons-client-4.5.9" - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.9.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.9.zip") handle_maven_del( test_zip, product_459, - targets=[(None, TEST_BUCKET, None, None)], + buckets=[('', TEST_BUCKET, '', '')], dir_=self.tempdir ) @@ -258,11 +260,11 @@ def test_deletion_index_with_root_prefix(self): def __test_deletion_index_with_prefix(self, prefix: str): self.__prepare_content(prefix) - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product_456 = "commons-client-4.5.6" handle_maven_del( test_zip, product_456, - targets=[(None, TEST_BUCKET, prefix, None)], + buckets=[('', TEST_BUCKET, prefix, '')], dir_=self.tempdir ) @@ -306,10 +308,10 @@ def __test_deletion_index_with_prefix(self, prefix: str): self.assertNotIn("../", index_content) self.assertNotIn(PROD_INFO_SUFFIX, index_content) - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.9.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.9.zip") handle_maven_del( test_zip, product_459, - targets=[(None, TEST_BUCKET, prefix, None)], + buckets=[('', TEST_BUCKET, prefix, '')], dir_=self.tempdir ) @@ -317,18 +319,18 @@ def __test_deletion_index_with_prefix(self, prefix: str): self.assertEqual(0, len(objs)) def __prepare_content(self, prefix=None): - test_zip = os.path.join(os.getcwd(), 
"tests/input/commons-client-4.5.6.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product_456 = "commons-client-4.5.6" handle_maven_uploading( test_zip, product_456, - targets=[(None, TEST_BUCKET, prefix, None)], + buckets=[('', TEST_BUCKET, prefix, '')], dir_=self.tempdir ) - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.9.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.9.zip") product_459 = "commons-client-4.5.9" handle_maven_uploading( test_zip, product_459, - targets=[(None, TEST_BUCKET, prefix, None)], + buckets=[('', TEST_BUCKET, prefix, '')], dir_=self.tempdir ) diff --git a/tests/test_maven_index_multi_tgts.py b/tests/test_maven_index_multi_tgts.py index 47eb079d..a02707f2 100644 --- a/tests/test_maven_index_multi_tgts.py +++ b/tests/test_maven_index_multi_tgts.py @@ -26,6 +26,8 @@ from moto import mock_s3 import os +from tests.constants import INPUTS + @mock_s3 class MavenFileIndexMultiTgtsTest(PackageBaseTest): @@ -40,12 +42,12 @@ def tearDown(self): super().tearDown() def test_uploading_index(self): - targets_ = [(None, TEST_BUCKET, None, None), (None, TEST_BUCKET_2, None, None)] - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + targets_ = [('', TEST_BUCKET, '', ''), ('', TEST_BUCKET_2, '', '')] + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product = "commons-client-4.5.6" handle_maven_uploading( test_zip, product, - targets=targets_, + buckets=targets_, dir_=self.tempdir ) @@ -100,20 +102,20 @@ def test_uploading_index(self): self.assertNotIn(PROD_INFO_SUFFIX, index_content, msg=f'{bucket_name}') def test_overlap_upload_index(self): - targets_ = [(None, TEST_BUCKET, None, None), (None, TEST_BUCKET_2, None, None)] - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + targets_ = [('', TEST_BUCKET, '', ''), ('', TEST_BUCKET_2, '', '')] + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product_456 = 
"commons-client-4.5.6" handle_maven_uploading( test_zip, product_456, - targets=targets_, + buckets=targets_, dir_=self.tempdir ) - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.9.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.9.zip") product_459 = "commons-client-4.5.9" handle_maven_uploading( test_zip, product_459, - targets=targets_, + buckets=targets_, dir_=self.tempdir ) @@ -188,12 +190,12 @@ def test_upload_index_with_root_prefix(self): self.__test_upload_index_with_prefix("/") def __test_upload_index_with_prefix(self, prefix: str): - targets_ = [(None, TEST_BUCKET, prefix, None), (None, TEST_BUCKET_2, prefix, None)] - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + targets_ = [('', TEST_BUCKET, prefix, ''), ('', TEST_BUCKET_2, prefix, '')] + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product = "commons-client-4.5.6" handle_maven_uploading( test_zip, product, - targets=targets_, + buckets=targets_, dir_=self.tempdir ) @@ -256,11 +258,11 @@ def __test_upload_index_with_prefix(self, prefix: str): def test_deletion_index(self): self.__prepare_content() - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product_456 = "commons-client-4.5.6" handle_maven_del( test_zip, product_456, - targets=[(None, TEST_BUCKET, None, None)], + buckets=[('', TEST_BUCKET, '', '')], dir_=self.tempdir ) @@ -305,10 +307,10 @@ def test_deletion_index(self): self.assertNotIn(PROD_INFO_SUFFIX, index_content) product_459 = "commons-client-4.5.9" - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.9.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.9.zip") handle_maven_del( test_zip, product_459, - targets=[(None, TEST_BUCKET, None, None)], + buckets=[('', TEST_BUCKET, '', '')], dir_=self.tempdir ) @@ -326,12 +328,12 @@ def test_deletion_index_with_root_prefix(self): def 
__test_deletion_index_with_prefix(self, prefix: str): self.__prepare_content(prefix) - targets_ = [(None, TEST_BUCKET, prefix, None), (None, TEST_BUCKET_2, prefix, None)] - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + targets_ = [('', TEST_BUCKET, prefix, ''), ('', TEST_BUCKET_2, prefix, '')] + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product_456 = "commons-client-4.5.6" handle_maven_del( test_zip, product_456, - targets=targets_, + buckets=targets_, dir_=self.tempdir ) @@ -397,10 +399,10 @@ def __test_deletion_index_with_prefix(self, prefix: str): ) self.assertNotIn(PROD_INFO_SUFFIX, index_content, msg=f'{bucket_name}') - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.9.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.9.zip") handle_maven_del( test_zip, product_459, - targets=targets_, + buckets=targets_, dir_=self.tempdir ) @@ -411,19 +413,19 @@ def __test_deletion_index_with_prefix(self, prefix: str): self.assertEqual(0, len(objs), msg=f'{bucket_name}') def __prepare_content(self, prefix=None): - targets_ = [(None, TEST_BUCKET, prefix, None), (None, TEST_BUCKET_2, prefix, None)] - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + targets_ = [('', TEST_BUCKET, prefix, ''), ('', TEST_BUCKET_2, prefix, '')] + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product_456 = "commons-client-4.5.6" handle_maven_uploading( test_zip, product_456, - targets=targets_, + buckets=targets_, dir_=self.tempdir ) - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.9.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.9.zip") product_459 = "commons-client-4.5.9" handle_maven_uploading( test_zip, product_459, - targets=targets_, + buckets=targets_, dir_=self.tempdir ) diff --git a/tests/test_maven_meta.py b/tests/test_maven_meta.py index 7c86c6c4..a905c17d 100644 --- a/tests/test_maven_meta.py +++ b/tests/test_maven_meta.py @@ 
-21,6 +21,7 @@ import charon.pkgs.maven as mvn import charon.utils.archive as archive from tests.base import BaseTest +from tests.constants import INPUTS class MavenMetadataTest(BaseTest): @@ -58,7 +59,7 @@ def test_parse_gavs(self): def test_gen_meta_file(self): test_zip = zipfile.ZipFile( - os.path.join(os.getcwd(), "tests/input/commons-lang3.zip") + os.path.join(INPUTS, "commons-lang3.zip") ) temp_root = os.path.join(self.tempdir, "tmp_zip") os.mkdir(temp_root) diff --git a/tests/test_maven_sign.py b/tests/test_maven_sign.py new file mode 100644 index 00000000..41cab15e --- /dev/null +++ b/tests/test_maven_sign.py @@ -0,0 +1,96 @@ +""" +Copyright (C) 2022 Red Hat, Inc. (https://github.com/Commonjava/charon) + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+""" +from charon.pkgs.maven import handle_maven_uploading +from tests.base import PackageBaseTest +from tests.commons import ( + TEST_BUCKET, COMMONS_CLIENT_456_SIGNS, COMMONS_LOGGING_SIGNS, COMMONS_CLIENT_456_INDEX, + COMMONS_CLIENT_459_SIGNS +) +from moto import mock_s3 +import os + +from tests.constants import INPUTS + + +@mock_s3 +class MavenFileSignTest(PackageBaseTest): + + def test_uploading_sign(self): + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") + product = "commons-client-4.5.6" + handle_maven_uploading( + test_zip, product, + buckets=[('', TEST_BUCKET, '', '')], + dir_=self.tempdir, + gen_sign=True, + key="random" + ) + + test_bucket = self.mock_s3.Bucket(TEST_BUCKET) + objs = list(test_bucket.objects.all()) + actual_files = [obj.key for obj in objs] + + self.assertEqual(46, len(actual_files)) + + for f in COMMONS_LOGGING_SIGNS: + self.assertIn(f, actual_files) + + for f in COMMONS_CLIENT_456_SIGNS: + self.assertIn(f, actual_files) + + indedx_obj = test_bucket.Object(COMMONS_CLIENT_456_INDEX) + index_content = str(indedx_obj.get()["Body"].read(), "utf-8") + self.assertIn( + "httpclient-4.5.6.jar.asc", + index_content + ) + + def test_overlap_upload_index(self): + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") + product_456 = "commons-client-4.5.6" + handle_maven_uploading( + test_zip, product_456, + buckets=[('', TEST_BUCKET, '', '')], + dir_=self.tempdir, + gen_sign=True, + key="random" + ) + + test_zip = os.path.join(INPUTS, "commons-client-4.5.9.zip") + product_459 = "commons-client-4.5.9" + handle_maven_uploading( + test_zip, product_459, + buckets=[('', TEST_BUCKET, '', '')], + dir_=self.tempdir, + gen_sign=True, + key="random" + ) + + test_bucket = self.mock_s3.Bucket(TEST_BUCKET) + objs = list(test_bucket.objects.all()) + actual_files = [obj.key for obj in objs] + + self.assertEqual(57, len(objs)) + + for f in COMMONS_LOGGING_SIGNS: + self.assertIn(f, actual_files) + + for f in COMMONS_CLIENT_456_SIGNS: + 
self.assertIn(f, actual_files) + + for f in COMMONS_CLIENT_459_SIGNS: + self.assertIn(f, actual_files) diff --git a/tests/test_maven_upload.py b/tests/test_maven_upload.py index b6165850..431475a8 100644 --- a/tests/test_maven_upload.py +++ b/tests/test_maven_upload.py @@ -26,6 +26,8 @@ from moto import mock_s3 import os +from tests.constants import INPUTS + @mock_s3 class MavenUploadTest(PackageBaseTest): @@ -42,19 +44,19 @@ def test_root_prefix_upload(self): self.__test_prefix_upload("/") def test_overlap_upload(self): - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product_456 = "commons-client-4.5.6" handle_maven_uploading( test_zip, product_456, - targets=[(None, TEST_BUCKET, None, None)], + buckets=[('', TEST_BUCKET, '', '')], dir_=self.tempdir, do_index=False ) - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.9.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.9.zip") product_459 = "commons-client-4.5.9" handle_maven_uploading( test_zip, product_459, - targets=[(None, TEST_BUCKET, None, None)], + buckets=[('', TEST_BUCKET, '', '')], dir_=self.tempdir, do_index=False ) @@ -109,11 +111,11 @@ def test_overlap_upload(self): self.assertIn("org.apache.httpcomponents", cat_content) def test_ignore_upload(self): - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product_456 = "commons-client-4.5.6" handle_maven_uploading( test_zip, product_456, [".*.sha1"], - targets=[(None, TEST_BUCKET, None, None)], + buckets=[('', TEST_BUCKET, '', '')], dir_=self.tempdir, do_index=False ) @@ -138,11 +140,11 @@ def test_ignore_upload(self): self.assertNotIn(f, actual_files) def __test_prefix_upload(self, prefix: str): - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product = 
"commons-client-4.5.6" handle_maven_uploading( test_zip, product, - targets=[(None, TEST_BUCKET, prefix, None)], + buckets=[('', TEST_BUCKET, prefix, '')], dir_=self.tempdir, do_index=False ) diff --git a/tests/test_maven_upload_multi_tgts.py b/tests/test_maven_upload_multi_tgts.py index 85aea608..ffb41d20 100644 --- a/tests/test_maven_upload_multi_tgts.py +++ b/tests/test_maven_upload_multi_tgts.py @@ -27,6 +27,8 @@ from moto import mock_s3 import os +from tests.constants import INPUTS + @mock_s3 class MavenUploadMultiTgtsTest(PackageBaseTest): @@ -42,40 +44,40 @@ def tearDown(self): def test_fresh_upload(self): self.__test_prefix_upload( - [(None, TEST_BUCKET, ""), (None, TEST_BUCKET_2, "", None)] + [('', TEST_BUCKET, ""), ('', TEST_BUCKET_2, "", '')] ) def test_short_prefix_upload(self): self.__test_prefix_upload( - [(None, TEST_BUCKET, SHORT_TEST_PREFIX), (None, TEST_BUCKET_2, SHORT_TEST_PREFIX, None)] + [('', TEST_BUCKET, SHORT_TEST_PREFIX), ('', TEST_BUCKET_2, SHORT_TEST_PREFIX, '')] ) def test_long_prefix_upload(self): self.__test_prefix_upload( - [(None, TEST_BUCKET, LONG_TEST_PREFIX), (None, TEST_BUCKET_2, LONG_TEST_PREFIX, None)] + [('', TEST_BUCKET, LONG_TEST_PREFIX), ('', TEST_BUCKET_2, LONG_TEST_PREFIX, '')] ) def test_root_prefix_upload(self): - self.__test_prefix_upload([(None, TEST_BUCKET, "/", None), - (None, TEST_BUCKET_2, "/", None)]) + self.__test_prefix_upload([('', TEST_BUCKET, "/", ''), + ('', TEST_BUCKET_2, "/", '')]) def test_overlap_upload(self): - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product_456 = "commons-client-4.5.6" targets_ = [ - (None, TEST_BUCKET, None, None), (None, TEST_BUCKET_2, None, None) + ('', TEST_BUCKET, '', ''), ('', TEST_BUCKET_2, '', '') ] handle_maven_uploading( test_zip, product_456, - targets=targets_, + buckets=targets_, dir_=self.tempdir, do_index=False ) - test_zip = os.path.join(os.getcwd(), 
"tests/input/commons-client-4.5.9.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.9.zip") product_459 = "commons-client-4.5.9" handle_maven_uploading( test_zip, product_459, - targets=targets_, + buckets=targets_, dir_=self.tempdir, do_index=False ) @@ -178,14 +180,14 @@ def test_overlap_upload(self): ) def test_ignore_upload(self): - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product_456 = "commons-client-4.5.6" targets_ = [ - (None, TEST_BUCKET, None, None), (None, TEST_BUCKET_2, None, None) + ('', TEST_BUCKET, '', ''), ('', TEST_BUCKET_2, '', '') ] handle_maven_uploading( test_zip, product_456, [".*.sha1"], - targets=targets_, + buckets=targets_, dir_=self.tempdir, do_index=False ) @@ -216,11 +218,11 @@ def test_ignore_upload(self): self.assertNotIn(f, actual_files, msg=f'{bucket_name}') def __test_prefix_upload(self, targets: List[Tuple[str, str, str, str]]): - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product = "commons-client-4.5.6" handle_maven_uploading( test_zip, product, - targets=targets, + buckets=targets, dir_=self.tempdir, do_index=False ) diff --git a/tests/test_npm_del.py b/tests/test_npm_del.py index b2b1e332..ad2b1f8e 100644 --- a/tests/test_npm_del.py +++ b/tests/test_npm_del.py @@ -20,6 +20,7 @@ from charon.storage import CHECKSUM_META_KEY from tests.base import LONG_TEST_PREFIX, SHORT_TEST_PREFIX, PackageBaseTest from tests.commons import TEST_BUCKET, CODE_FRAME_7_14_5_FILES, CODE_FRAME_META +from tests.constants import INPUTS @mock_s3 @@ -39,11 +40,11 @@ def test_npm_deletion_with_root_prefix(self): def __test_prefix(self, prefix: str = None): self.__prepare_content(prefix) - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.14.5.tgz") + test_tgz = os.path.join(INPUTS, "code-frame-7.14.5.tgz") product_7_14_5 = "code-frame-7.14.5" 
handle_npm_del( test_tgz, product_7_14_5, - targets=[(None, TEST_BUCKET, prefix, None)], + buckets=[('', TEST_BUCKET, prefix, '')], dir_=self.tempdir, do_index=False ) @@ -84,28 +85,28 @@ def __test_prefix(self, prefix: str = None): self.assertIn("\"license\": \"MIT\"", meta_content_client) self.assertIn("\"dist_tags\": {\"latest\": \"7.15.8\"}", meta_content_client) - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.15.8.tgz") + test_tgz = os.path.join(INPUTS, "code-frame-7.15.8.tgz") handle_npm_del( test_tgz, product_7_15_8, - targets=[(None, TEST_BUCKET, prefix, None)], + buckets=[('', TEST_BUCKET, prefix, '')], dir_=self.tempdir, do_index=False ) objs = list(test_bucket.objects.all()) self.assertEqual(0, len(objs)) def __prepare_content(self, prefix: str = None): - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.14.5.tgz") + test_tgz = os.path.join(INPUTS, "code-frame-7.14.5.tgz") product_7_14_5 = "code-frame-7.14.5" handle_npm_uploading( test_tgz, product_7_14_5, - targets=[(None, TEST_BUCKET, prefix, DEFAULT_REGISTRY)], + buckets=[('', TEST_BUCKET, prefix, DEFAULT_REGISTRY)], dir_=self.tempdir, do_index=False ) - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.15.8.tgz") + test_tgz = os.path.join(INPUTS, "code-frame-7.15.8.tgz") product_7_15_8 = "code-frame-7.15.8" handle_npm_uploading( test_tgz, product_7_15_8, - targets=[(None, TEST_BUCKET, prefix, DEFAULT_REGISTRY)], + buckets=[('', TEST_BUCKET, prefix, DEFAULT_REGISTRY)], dir_=self.tempdir, do_index=False ) diff --git a/tests/test_npm_del_multi_tgts.py b/tests/test_npm_del_multi_tgts.py index 918d168a..1746fba5 100644 --- a/tests/test_npm_del_multi_tgts.py +++ b/tests/test_npm_del_multi_tgts.py @@ -20,6 +20,7 @@ from charon.storage import CHECKSUM_META_KEY from tests.base import LONG_TEST_PREFIX, SHORT_TEST_PREFIX, PackageBaseTest from tests.commons import TEST_BUCKET, CODE_FRAME_7_14_5_FILES, CODE_FRAME_META, TEST_BUCKET_2 +from tests.constants import INPUTS 
@mock_s3 @@ -48,12 +49,12 @@ def test_npm_deletion_with_root_prefix(self): def __test_prefix(self, prefix: str = None): self.__prepare_content(prefix) - targets_ = [(None, TEST_BUCKET, prefix, None), (None, TEST_BUCKET_2, prefix, None)] - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.14.5.tgz") + targets_ = [('', TEST_BUCKET, prefix, ''), ('', TEST_BUCKET_2, prefix, '')] + test_tgz = os.path.join(INPUTS, "code-frame-7.14.5.tgz") product_7_14_5 = "code-frame-7.14.5" handle_npm_del( test_tgz, product_7_14_5, - targets=targets_, + buckets=targets_, dir_=self.tempdir, do_index=False ) @@ -120,10 +121,10 @@ def __test_prefix(self, prefix: str = None): meta_content_client, msg=f'{bucket_name}' ) - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.15.8.tgz") + test_tgz = os.path.join(INPUTS, "code-frame-7.15.8.tgz") handle_npm_del( test_tgz, product_7_15_8, - targets=targets_, + buckets=targets_, dir_=self.tempdir, do_index=False ) for target in targets_: @@ -133,20 +134,20 @@ def __test_prefix(self, prefix: str = None): self.assertEqual(0, len(objs)) def __prepare_content(self, prefix: str = None): - targets_ = [(None, TEST_BUCKET, prefix, DEFAULT_REGISTRY), - (None, TEST_BUCKET_2, prefix, DEFAULT_REGISTRY)] - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.14.5.tgz") + targets_ = [('', TEST_BUCKET, prefix, DEFAULT_REGISTRY), + ('', TEST_BUCKET_2, prefix, DEFAULT_REGISTRY)] + test_tgz = os.path.join(INPUTS, "code-frame-7.14.5.tgz") product_7_14_5 = "code-frame-7.14.5" handle_npm_uploading( test_tgz, product_7_14_5, - targets=targets_, + buckets=targets_, dir_=self.tempdir, do_index=False ) - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.15.8.tgz") + test_tgz = os.path.join(INPUTS, "code-frame-7.15.8.tgz") product_7_15_8 = "code-frame-7.15.8" handle_npm_uploading( test_tgz, product_7_15_8, - targets=targets_, + buckets=targets_, dir_=self.tempdir, do_index=False ) diff --git a/tests/test_npm_dist_gen.py 
b/tests/test_npm_dist_gen.py index e7721e41..438cc094 100644 --- a/tests/test_npm_dist_gen.py +++ b/tests/test_npm_dist_gen.py @@ -23,6 +23,7 @@ TEST_BUCKET, TEST_BUCKET_2, CODE_FRAME_META, CODE_FRAME_7_14_5_META ) +from tests.constants import INPUTS @mock_s3 @@ -33,12 +34,12 @@ def setUp(self): self.test_bucket_2 = self.mock_s3.Bucket(TEST_BUCKET_2) def test_dist_gen_in_single_target(self): - targets_ = [(None, TEST_BUCKET, None, "npm1.registry.redhat.com")] - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.14.5.tgz") + targets_ = [('', TEST_BUCKET, '', "npm1.registry.redhat.com")] + test_tgz = os.path.join(INPUTS, "code-frame-7.14.5.tgz") product_7_14_5 = "code-frame-7.14.5" handle_npm_uploading( test_tgz, product_7_14_5, - targets=targets_, + buckets=targets_, dir_=self.tempdir, do_index=False ) test_bucket = self.mock_s3.Bucket(TEST_BUCKET) @@ -75,13 +76,13 @@ def test_dist_gen_in_single_target(self): "+vOtCS5ndmJicPJhKAwYRI6UfFw==\"", merged_meta_content_client) def test_dist_gen_in_multi_targets(self): - targets_ = [(None, TEST_BUCKET, None, "npm1.registry.redhat.com"), - (None, TEST_BUCKET_2, None, "npm2.registry.redhat.com")] - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.14.5.tgz") + targets_ = [('', TEST_BUCKET, '', "npm1.registry.redhat.com"), + ('', TEST_BUCKET_2, '', "npm2.registry.redhat.com")] + test_tgz = os.path.join(INPUTS, "code-frame-7.14.5.tgz") product_7_14_5 = "code-frame-7.14.5" handle_npm_uploading( test_tgz, product_7_14_5, - targets=targets_, + buckets=targets_, dir_=self.tempdir, do_index=False ) test_bucket_1 = self.mock_s3.Bucket(TEST_BUCKET) @@ -111,12 +112,12 @@ def test_dist_gen_in_multi_targets(self): "-frame-7.14.5.tgz\"", merged_meta_content_client) def test_overlapping_registry_dist_gen(self): - targets_ = [(None, TEST_BUCKET, None, "npm1.registry.redhat.com")] - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.14.5.tgz") + targets_ = [('', TEST_BUCKET, '', 
"npm1.registry.redhat.com")] + test_tgz = os.path.join(INPUTS, "code-frame-7.14.5.tgz") product_7_14_5 = "code-frame-7.14.5" handle_npm_uploading( test_tgz, product_7_14_5, - targets=targets_, + buckets=targets_, dir_=self.tempdir, do_index=False ) test_bucket = self.mock_s3.Bucket(TEST_BUCKET) @@ -132,12 +133,12 @@ def test_overlapping_registry_dist_gen(self): self.assertIn("\"tarball\": \"https://npm1.registry.redhat.com/@babel/code-frame/-/code" "-frame-7.14.5.tgz\"", merged_meta_content_client) - targets_overlapping_ = [(None, TEST_BUCKET, None, "npm1.overlapping.registry.redhat.com")] - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.14.5.tgz") + targets_overlapping_ = [('', TEST_BUCKET, '', "npm1.overlapping.registry.redhat.com")] + test_tgz = os.path.join(INPUTS, "code-frame-7.14.5.tgz") product_7_14_5 = "code-frame-7.14.5" handle_npm_uploading( test_tgz, product_7_14_5, - targets=targets_overlapping_, + buckets=targets_overlapping_, dir_=self.tempdir, do_index=False ) diff --git a/tests/test_npm_index.py b/tests/test_npm_index.py index 31a0e71c..fa0ebc3a 100644 --- a/tests/test_npm_index.py +++ b/tests/test_npm_index.py @@ -24,6 +24,8 @@ from moto import mock_s3 import os +from tests.constants import INPUTS + NAMESPACE_BABEL_INDEX = "@babel/index.html" @@ -42,11 +44,11 @@ def test_uploding_index_with_root_prefix(self): self.__test_upload_prefix("/") def __test_upload_prefix(self, prefix: str = None): - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.14.5.tgz") + test_tgz = os.path.join(INPUTS, "code-frame-7.14.5.tgz") product_7_14_5 = "code-frame-7.14.5" handle_npm_uploading( test_tgz, product_7_14_5, - targets=[(None, TEST_BUCKET, prefix, DEFAULT_REGISTRY)], + buckets=[('', TEST_BUCKET, prefix, DEFAULT_REGISTRY)], dir_=self.tempdir, ) @@ -122,11 +124,11 @@ def test_deletion_index_with_root_prefix(self): def __test_deletion_prefix(self, prefix: str = None): self.__prepare_content(prefix) - test_tgz = os.path.join(os.getcwd(), 
"tests/input/code-frame-7.14.5.tgz") + test_tgz = os.path.join(INPUTS, "code-frame-7.14.5.tgz") product_7_14_5 = "code-frame-7.14.5" handle_npm_del( test_tgz, product_7_14_5, - targets=[(None, TEST_BUCKET, prefix, None)], + buckets=[('', TEST_BUCKET, prefix, '')], dir_=self.tempdir ) @@ -154,10 +156,10 @@ def __test_deletion_prefix(self, prefix: str = None): self.assertNotIn(PROD_INFO_SUFFIX, index_content) product_7_15_8 = "code-frame-7.15.8" - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.15.8.tgz") + test_tgz = os.path.join(INPUTS, "code-frame-7.15.8.tgz") handle_npm_del( test_tgz, product_7_15_8, - targets=[(None, TEST_BUCKET, prefix, None)], + buckets=[('', TEST_BUCKET, prefix, '')], dir_=self.tempdir ) @@ -165,18 +167,18 @@ def __test_deletion_prefix(self, prefix: str = None): self.assertEqual(0, len(objs)) def __prepare_content(self, prefix: str = None): - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.14.5.tgz") + test_tgz = os.path.join(INPUTS, "code-frame-7.14.5.tgz") product_7_14_5 = "code-frame-7.14.5" handle_npm_uploading( test_tgz, product_7_14_5, - targets=[(None, TEST_BUCKET, prefix, DEFAULT_REGISTRY)], + buckets=[('', TEST_BUCKET, prefix, DEFAULT_REGISTRY)], dir_=self.tempdir ) - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.15.8.tgz") + test_tgz = os.path.join(INPUTS, "code-frame-7.15.8.tgz") product_7_15_8 = "code-frame-7.15.8" handle_npm_uploading( test_tgz, product_7_15_8, - targets=[(None, TEST_BUCKET, prefix, DEFAULT_REGISTRY)], + buckets=[('', TEST_BUCKET, prefix, DEFAULT_REGISTRY)], dir_=self.tempdir ) diff --git a/tests/test_npm_index_multi_tgts.py b/tests/test_npm_index_multi_tgts.py index 65f3e206..ef653303 100644 --- a/tests/test_npm_index_multi_tgts.py +++ b/tests/test_npm_index_multi_tgts.py @@ -25,6 +25,8 @@ from moto import mock_s3 import os +from tests.constants import INPUTS + NAMESPACE_BABEL_INDEX = "@babel/index.html" @@ -53,13 +55,13 @@ def 
test_uploding_index_with_root_prefix(self): self.__test_upload_prefix("/") def __test_upload_prefix(self, prefix: str = None): - targets_ = [(None, TEST_BUCKET, prefix, DEFAULT_REGISTRY), - (None, TEST_BUCKET_2, prefix, DEFAULT_REGISTRY)] - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.14.5.tgz") + targets_ = [('', TEST_BUCKET, prefix, DEFAULT_REGISTRY), + ('', TEST_BUCKET_2, prefix, DEFAULT_REGISTRY)] + test_tgz = os.path.join(INPUTS, "code-frame-7.14.5.tgz") product_7_14_5 = "code-frame-7.14.5" handle_npm_uploading( test_tgz, product_7_14_5, - targets=targets_, + buckets=targets_, dir_=self.tempdir, ) @@ -118,7 +120,7 @@ def __test_upload_prefix(self, prefix: str = None): def test_overlap_upload_index(self): self.__prepare_content() - targets_ = [(None, TEST_BUCKET, None), (None, TEST_BUCKET_2, None)] + targets_ = [('', TEST_BUCKET, ''), ('', TEST_BUCKET_2, '')] for target in targets_: bucket_name = target[1] bucket = self.mock_s3.Bucket(bucket_name) @@ -162,12 +164,12 @@ def test_deletion_index_with_root_prefix(self): def __test_deletion_prefix(self, prefix: str = None): self.__prepare_content(prefix) - targets_ = [(None, TEST_BUCKET, prefix, None), (None, TEST_BUCKET_2, prefix, None)] - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.14.5.tgz") + targets_ = [('', TEST_BUCKET, prefix, ''), ('', TEST_BUCKET_2, prefix, '')] + test_tgz = os.path.join(INPUTS, "code-frame-7.14.5.tgz") product_7_14_5 = "code-frame-7.14.5" handle_npm_del( test_tgz, product_7_14_5, - targets=targets_, + buckets=targets_, dir_=self.tempdir ) @@ -206,10 +208,10 @@ def __test_deletion_prefix(self, prefix: str = None): self.assertNotIn(PROD_INFO_SUFFIX, index_content, msg=f'{bucket_name}') product_7_15_8 = "code-frame-7.15.8" - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.15.8.tgz") + test_tgz = os.path.join(INPUTS, "code-frame-7.15.8.tgz") handle_npm_del( test_tgz, product_7_15_8, - targets=targets_, + buckets=targets_, dir_=self.tempdir ) 
@@ -220,20 +222,20 @@ def __test_deletion_prefix(self, prefix: str = None): self.assertEqual(0, len(objs), msg=f'{bucket_name}') def __prepare_content(self, prefix: str = None): - targets_ = [(None, TEST_BUCKET, prefix, DEFAULT_REGISTRY), - (None, TEST_BUCKET_2, prefix, DEFAULT_REGISTRY)] - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.14.5.tgz") + targets_ = [('', TEST_BUCKET, prefix, DEFAULT_REGISTRY), + ('', TEST_BUCKET_2, prefix, DEFAULT_REGISTRY)] + test_tgz = os.path.join(INPUTS, "code-frame-7.14.5.tgz") product_7_14_5 = "code-frame-7.14.5" handle_npm_uploading( test_tgz, product_7_14_5, - targets=targets_, + buckets=targets_, dir_=self.tempdir ) - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.15.8.tgz") + test_tgz = os.path.join(INPUTS, "code-frame-7.15.8.tgz") product_7_15_8 = "code-frame-7.15.8" handle_npm_uploading( test_tgz, product_7_15_8, - targets=targets_, + buckets=targets_, dir_=self.tempdir ) diff --git a/tests/test_npm_meta.py b/tests/test_npm_meta.py index 6737a9e4..df660492 100644 --- a/tests/test_npm_meta.py +++ b/tests/test_npm_meta.py @@ -22,6 +22,7 @@ from charon.storage import S3Client from charon.constants import DEFAULT_REGISTRY from tests.base import BaseTest +from tests.constants import INPUTS MY_BUCKET = "npm_bucket" @@ -63,13 +64,10 @@ def test_handle_npm_uploading_for_old_version(self): Key='@redhat/kogito-tooling-workspace/package.json', Body=str(original_version_0_5_8_package_json) ) - tarball_test_path = os.path.join( - os.getcwd(), - 'tests/input/kogito-tooling-workspace-0.9.0-3.tgz' - ) + tarball_test_path = os.path.join(INPUTS, 'kogito-tooling-workspace-0.9.0-3.tgz') handle_npm_uploading( tarball_test_path, "kogito-tooling-workspace-0.9.0-3", - targets=[(None, MY_BUCKET, None, DEFAULT_REGISTRY)], + buckets=[('', MY_BUCKET, '', DEFAULT_REGISTRY)], dir_=self.tempdir ) (files, _) = self.s3_client.get_files( @@ -116,13 +114,10 @@ def test_handle_npm_uploading_for_new_version(self): 
Key='@redhat/kogito-tooling-workspace/package.json', Body=str(original_version_1_0_1_package_json) ) - tarball_test_path = os.path.join( - os.getcwd(), - 'tests/input/kogito-tooling-workspace-0.9.0-3.tgz' - ) + tarball_test_path = os.path.join(INPUTS, 'kogito-tooling-workspace-0.9.0-3.tgz') handle_npm_uploading( tarball_test_path, "kogito-tooling-workspace-0.9.0-3", - targets=[(None, MY_BUCKET, None, DEFAULT_REGISTRY)], + buckets=[('', MY_BUCKET, '', DEFAULT_REGISTRY)], dir_=self.tempdir ) (files, _) = self.s3_client.get_files( diff --git a/tests/test_npm_upload.py b/tests/test_npm_upload.py index 9bbbb861..1130b4d0 100644 --- a/tests/test_npm_upload.py +++ b/tests/test_npm_upload.py @@ -26,6 +26,7 @@ TEST_BUCKET, CODE_FRAME_7_14_5_FILES, CODE_FRAME_7_15_8_FILES, CODE_FRAME_META ) +from tests.constants import INPUTS @mock_s3 @@ -44,18 +45,18 @@ def test_upload_with_root_prefix(self): self.__test_prefix("/") def test_double_uploads(self): - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.14.5.tgz") + test_tgz = os.path.join(INPUTS, "code-frame-7.14.5.tgz") product_7_14_5 = "code-frame-7.14.5" handle_npm_uploading( test_tgz, product_7_14_5, - targets=[(None, TEST_BUCKET, None, DEFAULT_REGISTRY)], + buckets=[('', TEST_BUCKET, '', DEFAULT_REGISTRY)], dir_=self.tempdir, do_index=False ) - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.15.8.tgz") + test_tgz = os.path.join(INPUTS, "code-frame-7.15.8.tgz") product_7_15_8 = "code-frame-7.15.8" handle_npm_uploading( test_tgz, product_7_15_8, - targets=[(None, TEST_BUCKET, None, DEFAULT_REGISTRY)], + buckets=[('', TEST_BUCKET, '', DEFAULT_REGISTRY)], dir_=self.tempdir, do_index=False ) test_bucket = self.mock_s3.Bucket(TEST_BUCKET) @@ -88,11 +89,11 @@ def test_double_uploads(self): self.assertIn("\"dist_tags\": {\"latest\": \"7.15.8\"}", meta_content_client) def __test_prefix(self, prefix: str = None): - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.14.5.tgz") + test_tgz = 
os.path.join(INPUTS, "code-frame-7.14.5.tgz") product_7_14_5 = "code-frame-7.14.5" handle_npm_uploading( test_tgz, product_7_14_5, - targets=[(None, TEST_BUCKET, prefix, DEFAULT_REGISTRY)], + buckets=[('', TEST_BUCKET, prefix, DEFAULT_REGISTRY)], dir_=self.tempdir, do_index=False ) diff --git a/tests/test_npm_upload_multi_tgts.py b/tests/test_npm_upload_multi_tgts.py index 3a3b7aa5..d95868bd 100644 --- a/tests/test_npm_upload_multi_tgts.py +++ b/tests/test_npm_upload_multi_tgts.py @@ -26,6 +26,7 @@ TEST_BUCKET, CODE_FRAME_7_14_5_FILES, CODE_FRAME_7_15_8_FILES, CODE_FRAME_META, TEST_BUCKET_2 ) +from tests.constants import INPUTS @mock_s3 @@ -53,20 +54,20 @@ def test_upload_with_root_prefix(self): self.__test_prefix("/") def test_double_uploads(self): - targets_ = [(None, TEST_BUCKET, None, DEFAULT_REGISTRY), - (None, TEST_BUCKET_2, None, DEFAULT_REGISTRY)] - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.14.5.tgz") + targets_ = [('', TEST_BUCKET, '', DEFAULT_REGISTRY), + ('', TEST_BUCKET_2, '', DEFAULT_REGISTRY)] + test_tgz = os.path.join(INPUTS, "code-frame-7.14.5.tgz") product_7_14_5 = "code-frame-7.14.5" handle_npm_uploading( test_tgz, product_7_14_5, - targets=targets_, + buckets=targets_, dir_=self.tempdir, do_index=False ) - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.15.8.tgz") + test_tgz = os.path.join(INPUTS, "code-frame-7.15.8.tgz") product_7_15_8 = "code-frame-7.15.8" handle_npm_uploading( test_tgz, product_7_15_8, - targets=targets_, + buckets=targets_, dir_=self.tempdir, do_index=False ) @@ -124,13 +125,13 @@ def test_double_uploads(self): ) def __test_prefix(self, prefix: str = None): - targets_ = [(None, TEST_BUCKET, prefix, DEFAULT_REGISTRY), - (None, TEST_BUCKET_2, prefix, DEFAULT_REGISTRY)] - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.14.5.tgz") + targets_ = [('', TEST_BUCKET, prefix, DEFAULT_REGISTRY), + ('', TEST_BUCKET_2, prefix, DEFAULT_REGISTRY)] + test_tgz = os.path.join(INPUTS, 
"code-frame-7.14.5.tgz") product_7_14_5 = "code-frame-7.14.5" handle_npm_uploading( test_tgz, product_7_14_5, - targets=targets_, + buckets=targets_, dir_=self.tempdir, do_index=False ) diff --git a/tests/test_pkgs_dryrun.py b/tests/test_pkgs_dryrun.py index 8eff2b36..7f2b004e 100644 --- a/tests/test_pkgs_dryrun.py +++ b/tests/test_pkgs_dryrun.py @@ -21,15 +21,17 @@ from moto import mock_s3 import os +from tests.constants import INPUTS + @mock_s3 class PkgsDryRunTest(PackageBaseTest): def test_maven_upload_dry_run(self): - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product = "commons-client-4.5.6" handle_maven_uploading( test_zip, product, - targets=[(None, TEST_BUCKET, None, None)], + buckets=[('', TEST_BUCKET, '', '')], dir_=self.tempdir, dry_run=True ) @@ -41,11 +43,11 @@ def test_maven_upload_dry_run(self): def test_maven_delete_dry_run(self): self.__prepare_maven_content() - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product_456 = "commons-client-4.5.6" handle_maven_del( test_zip, product_456, - targets=[(None, TEST_BUCKET, None, None)], + buckets=[('', TEST_BUCKET, '', '')], dir_=self.tempdir, dry_run=True ) @@ -55,11 +57,11 @@ def test_maven_delete_dry_run(self): self.assertEqual(50, len(objs)) def test_npm_upload_dry_run(self): - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.14.5.tgz") + test_tgz = os.path.join(INPUTS, "code-frame-7.14.5.tgz") product_7_14_5 = "code-frame-7.14.5" handle_npm_uploading( test_tgz, product_7_14_5, - targets=[(None, TEST_BUCKET, None, DEFAULT_REGISTRY)], + buckets=[('', TEST_BUCKET, '', DEFAULT_REGISTRY)], dir_=self.tempdir, dry_run=True ) @@ -71,11 +73,11 @@ def test_npm_upload_dry_run(self): def test_npm_deletion_dry_run(self): self.__prepare_npm_content() - test_tgz = os.path.join(os.getcwd(), 
"tests/input/code-frame-7.14.5.tgz") + test_tgz = os.path.join(INPUTS, "code-frame-7.14.5.tgz") product_7_14_5 = "code-frame-7.14.5" handle_npm_del( test_tgz, product_7_14_5, - targets=[(None, TEST_BUCKET, None, None)], + buckets=[('', TEST_BUCKET, '', '')], dir_=self.tempdir, dry_run=True ) @@ -85,35 +87,35 @@ def test_npm_deletion_dry_run(self): self.assertEqual(11, len(objs)) def __prepare_maven_content(self): - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.6.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product_456 = "commons-client-4.5.6" handle_maven_uploading( test_zip, product_456, - targets=[(None, TEST_BUCKET, None, None)], + buckets=[('', TEST_BUCKET, '', '')], dir_=self.tempdir ) - test_zip = os.path.join(os.getcwd(), "tests/input/commons-client-4.5.9.zip") + test_zip = os.path.join(INPUTS, "commons-client-4.5.9.zip") product_459 = "commons-client-4.5.9" handle_maven_uploading( test_zip, product_459, - targets=[(None, TEST_BUCKET, None, None)], + buckets=[('', TEST_BUCKET, '', '')], dir_=self.tempdir ) def __prepare_npm_content(self): - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.14.5.tgz") + test_tgz = os.path.join(INPUTS, "code-frame-7.14.5.tgz") product_7_14_5 = "code-frame-7.14.5" handle_npm_uploading( test_tgz, product_7_14_5, - targets=[(None, TEST_BUCKET, None, DEFAULT_REGISTRY)], + buckets=[('', TEST_BUCKET, '', DEFAULT_REGISTRY)], dir_=self.tempdir ) - test_tgz = os.path.join(os.getcwd(), "tests/input/code-frame-7.15.8.tgz") + test_tgz = os.path.join(INPUTS, "code-frame-7.15.8.tgz") product_7_15_8 = "code-frame-7.15.8" handle_npm_uploading( test_tgz, product_7_15_8, - targets=[(None, TEST_BUCKET, None, DEFAULT_REGISTRY)], + buckets=[('', TEST_BUCKET, '', DEFAULT_REGISTRY)], dir_=self.tempdir ) diff --git a/tests/test_s3client.py b/tests/test_s3client.py index dde8ab49..1c78db2b 100644 --- a/tests/test_s3client.py +++ b/tests/test_s3client.py @@ -14,7 +14,6 @@ limitations under the 
License. """ from typing import List -from boto3_type_annotations import s3 from charon.storage import S3Client, CHECKSUM_META_KEY from charon.utils.archive import extract_zip_all from charon.utils.files import overwrite_file, read_sha1 @@ -27,6 +26,7 @@ import zipfile import shutil +from tests.constants import INPUTS MY_BUCKET = "my_bucket" MY_PREFIX = "mock_folder" @@ -149,11 +149,11 @@ def test_upload_and_delete_files(self): bucket = self.mock_s3.Bucket(MY_BUCKET) # test upload existed files with the product. The product will be added to metadata self.s3_client.upload_files( - all_files, targets=[(MY_BUCKET, None)], + all_files, targets=[(MY_BUCKET, '')], product="apache-commons", root=root ) - def content_check(products: List[str], objs: List[s3.ObjectSummary]): + def content_check(products: List[str], objs: List): self.assertEqual(COMMONS_LANG3_ZIP_ENTRY, len(objs)) for o in objs: obj = o.Object() @@ -171,7 +171,7 @@ def content_check(products: List[str], objs: List[s3.ObjectSummary]): # test upload existed files with extra product. The extra product will be added to metadata self.s3_client.upload_files( - all_files, targets=[(MY_BUCKET, None)], + all_files, targets=[(MY_BUCKET, '')], product="commons-lang3", root=root ) objects = list(bucket.objects.all()) @@ -179,14 +179,14 @@ def content_check(products: List[str], objs: List[s3.ObjectSummary]): # test delete files with one product. The file will not be deleted, but the product will # be removed from metadata. - self.s3_client.delete_files(all_files, target=(MY_BUCKET, None), product="apache-commons", + self.s3_client.delete_files(all_files, target=(MY_BUCKET, ''), product="apache-commons", root=root) objects = list(bucket.objects.all()) content_check(["commons-lang3"], objects) # test delete files with left product. The file will be deleted, because all products # have been removed from metadata. 
- self.s3_client.delete_files(all_files, target=(MY_BUCKET, None), product="commons-lang3", + self.s3_client.delete_files(all_files, target=(MY_BUCKET, ''), product="commons-lang3", root=root) self.assertEqual(0, len(list(bucket.objects.all()))) @@ -231,7 +231,7 @@ def test_upload_file_with_checksum(self): overwrite_file(file, content1) sha1_1 = read_sha1(file) self.s3_client.upload_files( - [file], targets=[(MY_BUCKET, None)], + [file], targets=[(MY_BUCKET, '')], product="foo-bar-1.0", root=temp_root ) objects = list(bucket.objects.all()) @@ -252,7 +252,7 @@ def test_upload_file_with_checksum(self): sha1_2 = read_sha1(file) self.assertNotEqual(sha1_1, sha1_2) self.s3_client.upload_files( - [file], targets=[(MY_BUCKET, None)], + [file], targets=[(MY_BUCKET, '')], product="foo-bar-1.0-2", root=temp_root ) objects = list(bucket.objects.all()) @@ -290,7 +290,7 @@ def test_upload_metadata_with_checksum(self): overwrite_file(file, content1) sha1_1 = read_sha1(file) self.s3_client.upload_metadatas( - [file], target=(MY_BUCKET, None), root=temp_root + [file], target=(MY_BUCKET, ''), root=temp_root ) objects = list(bucket.objects.all()) self.assertEqual(1, len(objects)) @@ -305,7 +305,7 @@ def test_upload_metadata_with_checksum(self): self.assertEqual(sha1_1, sha1_1_repeated) self.s3_client.upload_metadatas( [file], - target=(MY_BUCKET, None), + target=(MY_BUCKET, ''), root=temp_root, ) objects = list(bucket.objects.all()) @@ -335,7 +335,7 @@ def test_upload_metadata_with_checksum(self): sha1_2 = read_sha1(file) self.assertNotEqual(sha1_1, sha1_2) self.s3_client.upload_metadatas( - [file], target=(MY_BUCKET, None), root=temp_root + [file], target=(MY_BUCKET, ''), root=temp_root ) objects = list(bucket.objects.all()) self.assertEqual(1, len(objects)) @@ -361,7 +361,7 @@ def test_failed_paths(self): shutil.rmtree(root) failed_paths = self.s3_client.upload_files( - all_files, targets=[(MY_BUCKET, None)], + all_files, targets=[(MY_BUCKET, '')], product="apache-commons", 
root=temp_root ) @@ -370,7 +370,7 @@ def test_failed_paths(self): def test_exists_override_failing(self): (temp_root, _, all_files) = self.__prepare_files() failed_paths = self.s3_client.upload_files( - all_files, targets=[(MY_BUCKET, None)], + all_files, targets=[(MY_BUCKET, '')], product="apache-commons", root=temp_root ) self.assertEqual(0, len(failed_paths)) @@ -383,7 +383,7 @@ def test_exists_override_failing(self): sha1_changed = read_sha1(all_files[0]) self.assertNotEqual(sha1, sha1_changed) failed_paths = self.s3_client.upload_files( - all_files, targets=[(MY_BUCKET, None)], + all_files, targets=[(MY_BUCKET, '')], product="apache-commons-2", root=temp_root ) bucket = self.mock_s3.Bucket(MY_BUCKET) @@ -392,7 +392,7 @@ def test_exists_override_failing(self): def __prepare_files(self): test_zip = zipfile.ZipFile( - os.path.join(os.getcwd(), "tests/input/commons-lang3.zip") + os.path.join(INPUTS, "commons-lang3.zip") ) temp_root = os.path.join(self.tempdir, "tmp_zip") os.mkdir(temp_root) diff --git a/tests/test_util.py b/tests/test_util.py index 584920c2..35c9deff 100644 --- a/tests/test_util.py +++ b/tests/test_util.py @@ -17,10 +17,12 @@ import os import unittest +from tests.constants import INPUTS + class UtilTest(unittest.TestCase): def test_digest(self): - test_file = os.path.join(os.getcwd(), "tests/input/commons-lang3.zip") + test_file = os.path.join(INPUTS, "commons-lang3.zip") self.assertEqual("bd4fe0a8111df64430b6b419a91e4218ddf44734", digest(test_file)) self.assertEqual( "61ff1d38cfeb281b05fcd6b9a2318ed47cd62c7f99b8a9d3e819591c03fe6804", @@ -28,7 +30,7 @@ def test_digest(self): ) def test_read_sha1(self): - test_file = os.path.join(os.getcwd(), "tests/input/commons-lang3.zip") + test_file = os.path.join(INPUTS, "commons-lang3.zip") # read the real sha1 hash self.assertEqual("bd4fe0a8111df64430b6b419a91e4218ddf44734", digest(test_file)) # read hash from .sha1 file @@ -37,5 +39,5 @@ def test_read_sha1(self): ) # For .sha1 file itself, will use digest 
directly - test_file = os.path.join(os.getcwd(), "tests/input/commons-lang3.zip.sha1") + test_file = os.path.join(INPUTS, "commons-lang3.zip.sha1") self.assertEqual(digest(test_file), read_sha1(test_file)) diff --git a/tests/utils/test_yaml.py b/tests/utils/test_yaml.py new file mode 100644 index 00000000..cb36cd14 --- /dev/null +++ b/tests/utils/test_yaml.py @@ -0,0 +1,202 @@ +""" +Copyright (C) 2022 Red Hat, Inc. (https://github.com/Commonjava/charon) + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" + +from __future__ import absolute_import + +import json +import os + +import jsonschema +import pkg_resources +import pytest +import yaml +from flexmock import flexmock + +from charon.utils.yaml import (read_yaml, + read_yaml_from_file_path, + load_schema, + validate_with_schema) + + +def test_read_yaml_file_ioerrors(tmpdir): + config_path = os.path.join(str(tmpdir), 'nosuchfile.yaml') + with pytest.raises(IOError): + read_yaml_from_file_path(config_path, 'schemas/nosuchfile.json') + + +@pytest.mark.parametrize('from_file', [True, False]) +@pytest.mark.parametrize('config', [ + ("""\ + targets: + ga: + - bucket: test_bucket + """), +]) +def test_read_yaml_file_or_yaml(tmpdir, from_file, config): + expected = yaml.safe_load(config) + + if from_file: + config_path = os.path.join(str(tmpdir), 'config.yaml') + with open(config_path, 'w') as fp: + fp.write(config) + output = read_yaml_from_file_path(config_path, 'schemas/charon.json') + else: + output = read_yaml(config, 
'schemas/charon.json') + + assert output == expected + + +def test_read_yaml_bad_package(caplog): + with pytest.raises(ImportError): + read_yaml("", 'schemas/charon.json', package='bad_package') + assert 'Unable to find package bad_package' in caplog.text + + +def test_read_yaml_file_bad_extract(tmpdir, caplog): + class FakeProvider(object): + def get_resource_stream(self, pkg, rsc): + raise IOError + + # pkg_resources.resource_stream() cannot be mocked directly + # Instead mock the module-level function it calls. + (flexmock(pkg_resources) + .should_receive('get_provider') + .and_return(FakeProvider())) + + config_path = os.path.join(str(tmpdir), 'config.yaml') + with open(config_path, 'w'): + pass + + with pytest.raises(IOError): + read_yaml_from_file_path(config_path, 'schemas/charon.json') + assert "unable to extract JSON schema, cannot validate" in caplog.text + + +def test_read_yaml_file_bad_decode(tmpdir, caplog): + (flexmock(json) + .should_receive('load') + .and_raise(ValueError)) + + config_path = os.path.join(str(tmpdir), 'config.yaml') + with open(config_path, 'w'): + pass + + with pytest.raises(ValueError): + read_yaml_from_file_path(config_path, 'schemas/charon.json') + assert "unable to decode JSON schema, cannot validate" in caplog.text + + +@pytest.mark.parametrize(('config', 'expected'), [ + ("""\ + ignore_patterns: + - test """, + "'targets' is a required property"), + ("""\ + tests: ga """, + "Additional properties are not allowed ('tests' was unexpected)"), +]) +def test_read_yaml_validation_error(config, expected, caplog): + with pytest.raises(jsonschema.ValidationError) as exc_info: + read_yaml(config, 'schemas/charon.json') + + assert "schema validation error" in caplog.text + assert expected in str(exc_info.value) + + +@pytest.mark.parametrize(('package', 'package_pass'), [ + ('charon', True), + ('FOO', False) +]) +def test_load_schema_package(package, package_pass, caplog): + schema = 'schemas/charon.json' + if not package_pass: + with 
pytest.raises(ImportError): + load_schema(package, schema) + assert "Unable to find package FOO" in caplog.text + else: + assert isinstance(load_schema(package, schema), dict) + + +@pytest.mark.parametrize(('schema', 'schema_pass'), [ + ('schemas/charon.json', True), + ('schemas/charon.json', False) +]) +def test_load_schema_schema(schema, schema_pass, caplog): + package = 'charon' + if not schema_pass: + (flexmock(json) + .should_receive('load') + .and_raise(ValueError)) + with pytest.raises(ValueError): + load_schema(package, schema) + assert "unable to decode JSON schema, cannot validate" in caplog.text + else: + assert isinstance(load_schema(package, schema), dict) + + +@pytest.mark.parametrize(('config', 'validation_pass', 'expected'), [ + ({ + 'name': 1 + }, False, + "1 is not of type 'string" + ), + ( + { + 'name': 'foo', + 'module': 'bar' + }, + False, + "'module' was unexpected", + ), ({ + 'name': 'foo' + }, True, '') +]) +def test_validate_with_schema_validation(config, validation_pass, expected, caplog): + schema = { + 'type': 'object', + 'required': ['name'], + 'properties': { + 'name': { + 'type': 'string' + } + }, + 'additionalProperties': False + } + if not validation_pass: + with pytest.raises(jsonschema.ValidationError) as exc_info: + validate_with_schema(config, schema) + assert 'schema validation error' in caplog.text + assert expected in str(exc_info.value) + else: + validate_with_schema(config, schema) + assert expected == '' + + +def test_validate_with_schema_bad_schema(caplog): + config = { + 'name': 'foo' + } + schema = { + 'type': 'bakagaki', # Nonexistent type + 'properties': { + 'name': { + 'type': 'string' + } + } + } + with pytest.raises(jsonschema.SchemaError): + validate_with_schema(config, schema) + assert 'invalid schema, cannot validate' in caplog.text diff --git a/tox.ini b/tox.ini new file mode 100644 index 00000000..218f0a61 --- /dev/null +++ b/tox.ini @@ -0,0 +1,40 @@ +[tox] +envlist = test,flake8,pylint,bandit + +[testenv] 
+basepython=python3 +skip_install = true + +[testenv:test] +sitepackages = true +deps = -r requirements-dev.txt +commands = python3 -m pytest --cov=charon {posargs:"tests"} + +[testenv:pylint] +deps = pylint==2.9.6 +commands = python3 -m pylint charon tests + +[testenv:flake8] +deps = flake8 +commands = python3 -m flake8 charon tests + +[testenv:bandit] +deps = bandit +commands = bandit-baseline -r charon -ll -ii + +[testenv:mypy] +deps = mypy==0.910 +commands = + mypy \ + --install-types \ + --non-interactive \ + --ignore-missing-imports \ + --package {posargs:"charon"} + +[coverage:report] +skip_covered = true +sort = Cover + +[pytest] +addopts = -ra --color=auto --html=__pytest_reports/charon-unit-tests.html --self-contained-html +render_collapsed = True From cb492959da07aec203ed83d008f153265fe020f7 Mon Sep 17 00:00:00 2001 From: Gang Li Date: Wed, 28 Aug 2024 09:45:56 +0800 Subject: [PATCH 07/23] Use fixed image tag instead of floating latest for Container file --- image/Containerfile | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/image/Containerfile b/image/Containerfile index 6ee0c02c..267cb9f0 100644 --- a/image/Containerfile +++ b/image/Containerfile @@ -19,16 +19,16 @@ # 4. Start using uploader # charon upload/delete from /home/charon/upload/... 
### -FROM registry.access.redhat.com/ubi8-minimal:latest as builder +FROM registry.access.redhat.com/ubi8-minimal:8.10-1052 as builder -ARG GIT_BRANCH=main +ARG GIT_BRANCH=release RUN microdnf install -y git-core python3.12 python3.12-pip && microdnf clean all RUN git clone -b ${GIT_BRANCH} --depth 1 https://github.com/Commonjava/charon.git RUN pip3 install --no-cache-dir --upgrade pip RUN pip3 wheel ./charon -FROM registry.access.redhat.com/ubi8-minimal:latest +FROM registry.access.redhat.com/ubi8-minimal:8.10-1052 ARG USER=charon ARG UID=10000 From 8795a047c6473b1b0d5f3456a40b87a8eec26957 Mon Sep 17 00:00:00 2001 From: Gang Li Date: Tue, 17 Dec 2024 09:11:05 +0800 Subject: [PATCH 08/23] chore: add --version flag to support version check Signed-off-by: Gang Li --- charon/cmd/__init__.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/charon/cmd/__init__.py b/charon/cmd/__init__.py index 16a0129d..e2f54677 100644 --- a/charon/cmd/__init__.py +++ b/charon/cmd/__init__.py @@ -13,7 +13,7 @@ See the License for the specific language governing permissions and limitations under the License. """ -from click import group +from click import group, version_option, pass_context from charon.cmd.cmd_upload import upload from charon.cmd.cmd_delete import delete from charon.cmd.cmd_index import index @@ -22,7 +22,9 @@ @group() -def cli(): +@version_option() +@pass_context +def cli(ctx): """Charon is a tool to synchronize several types of artifacts repository data to Red Hat Ronda service (maven.repository.redhat.com). 
From 945c763a2048061b836064fe8ec3cd84d6db02d0 Mon Sep 17 00:00:00 2001 From: Gang Li Date: Fri, 9 May 2025 16:31:30 +0800 Subject: [PATCH 09/23] Fix mmeng-4362: re-sort the indexing page items --- charon/pkgs/indexing.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/charon/pkgs/indexing.py b/charon/pkgs/indexing.py index 4d50e036..4710cdab 100644 --- a/charon/pkgs/indexing.py +++ b/charon/pkgs/indexing.py @@ -23,7 +23,7 @@ from jinja2 import Template import os import logging -from typing import List, Set, Dict +from typing import List, Dict from charon.utils.strings import remove_prefix @@ -48,7 +48,7 @@ def __get_index_template(package_type: str) -> str: class IndexedHTML(object): # object for holding index html file data - def __init__(self, title: str, header: str, items: Set[str]): + def __init__(self, title: str, header: str, items: List[str]): self.title = title self.header = header self.items = items @@ -174,8 +174,8 @@ def __to_html_content(package_type: str, contents: List[str], folder: str) -> st items = temp_items else: items.extend(contents) - items_set = set(__sort_index_items(items)) - index = IndexedHTML(title=folder, header=folder, items=items_set) + items_result = list(filter(lambda c: c.strip(), __sort_index_items(set(items)))) + index = IndexedHTML(title=folder, header=folder, items=items_result) return index.generate_index_file_content(package_type) @@ -303,8 +303,8 @@ def re_index( real_contents.append(c) else: real_contents = contents - logger.debug(real_contents) index_content = __to_html_content(package_type, real_contents, path) + logger.debug("The re-indexed page content: %s", index_content) if not dry_run: index_path = os.path.join(path, "index.html") if path == "/": From 96758640a32c8ec41376699067611508a8e7ce0a Mon Sep 17 00:00:00 2001 From: Gang Li Date: Mon, 12 May 2025 13:57:22 +0800 Subject: [PATCH 10/23] Fix pip warning: add pyproject.toml --- pyproject.toml | 108 
+++++++++++++++++++++++++++++++++++++++++ tests/requirements.txt | 1 - 2 files changed, 108 insertions(+), 1 deletion(-) create mode 100644 pyproject.toml diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..43ab9cb4 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,108 @@ +[build-system] +build-backend = "setuptools.build_meta" +requires = ["setuptools", "setuptools-scm"] + +[project] +name = "charon" +version = "1.3.3" +authors = [ + {name = "RedHat EXD SPMM"}, +] +readme = "README.md" +keywords = ["charon", "mrrc", "maven", "npm", "build", "java"] +license-files = ["LICENSE"] +requires-python = ">=3.9" +classifiers = [ + "Development Status :: 1 - Planning", + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Topic :: Software Development :: Build Tools", + "Topic :: Utilities", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", +] +dependencies = [ + "Jinja2>=3.1.3", + "boto3>=1.18.35", + "botocore>=1.21.35", + "click>=8.1.3", + "requests>=2.25.0", + "PyYAML>=5.4.1", + "defusedxml>=0.7.1", + "subresource-integrity>=0.2", + "jsonschema>=4.9.1", + "urllib3>=1.25.10", + "semantic-version>=2.10.0" +] + +[project.optional-dependencies] +dev = [ + "pylint", + "flake8", + "pep8", + "mypy", + "tox", +] +test = [ + "flexmock>=0.10.6", + "responses>=0.9.0", + "pytest<=7.1.3", + "pytest-cov", + "pytest-html", + "requests-mock", + "moto>=5.0.16,<6", + "python-gnupg>=0.5.0,<1" +] + +[project.scripts] +charon = "charon.cmd:cli" + +[tool.setuptools] +packages = ["charon"] + +[tool.setuptools_scm] +fallback_version = "1.3.4+dev.fallback" + +[tool.setuptools.package-data] +charon = ["schemas/*.json"] + +[tool.mypy] +python_version = "3.9" + +[tool.coverage.report] +skip_covered = true 
+show_missing = true +fail_under = 90 +exclude_lines = [ + "def __repr__", + "if __name__ == .__main__.:", + "if TYPE_CHECKING:", + "return NotImplemented", +] + +[tool.pytest.ini_options] +log_cli_level = "DEBUG" +log_format = "%(asctime)s %(levelname)s %(message)s" +log_date_format = "%Y-%m-%d %H:%M:%S" +testpaths = [ + "tests", +] + +[tool.flake8] +show_source = true +ignore = [ + "D100", # missing docstring in public module + "D104", # missing docstring in public package + "D105", # missing docstring in magic method + "W503", # line break before binary operator + "E203", # whitespace before ':' + "E501", # line too long + "E731", # do not assign a lambda expression +] +per-file-ignores = [ + "tests/*:D101,D102,D103", # missing docstring in public class, method, function +] \ No newline at end of file diff --git a/tests/requirements.txt b/tests/requirements.txt index 09f63266..408de626 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -3,7 +3,6 @@ responses>=0.9.0 pytest<=7.1.3 pytest-cov pytest-html -flake8 requests-mock moto>=5.0.16,<6 python-gnupg>=0.5.0,<1 From 99c6fc816204710c16dbe4cfcfdc28374950c645 Mon Sep 17 00:00:00 2001 From: Gang Li Date: Mon, 23 Jun 2025 15:59:04 +0800 Subject: [PATCH 11/23] Update version to 1.3.4 --- charon.spec | 2 +- pyproject.toml | 4 ++-- setup.py | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/charon.spec b/charon.spec index 147d9885..95eb21a9 100644 --- a/charon.spec +++ b/charon.spec @@ -1,7 +1,7 @@ %global owner Commonjava %global modulename charon -%global charon_version 1.3.3 +%global charon_version 1.3.4 %global sdist_tar_name %{modulename}-%{charon_version} %global python3_pkgversion 3 diff --git a/pyproject.toml b/pyproject.toml index 43ab9cb4..8e211380 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ requires = ["setuptools", "setuptools-scm"] [project] name = "charon" -version = "1.3.3" +version = "1.3.4" authors = [ {name = "RedHat EXD SPMM"}, ] @@ -105,4 +105,4 
@@ ignore = [ ] per-file-ignores = [ "tests/*:D101,D102,D103", # missing docstring in public class, method, function -] \ No newline at end of file +] diff --git a/setup.py b/setup.py index 692b53eb..934ae861 100755 --- a/setup.py +++ b/setup.py @@ -15,7 +15,7 @@ """ from setuptools import setup, find_packages -version = "1.3.3" +version = "1.3.4" long_description = """ This charon is a tool to synchronize several types of From 8ae18613bbf73cbd8d2e045ecfd8fbcc48e2f366 Mon Sep 17 00:00:00 2001 From: Gang Li Date: Wed, 25 Jun 2025 20:24:05 +0800 Subject: [PATCH 12/23] Chore: replace all file write with files.overwrite_file --- charon/pkgs/checksum_http.py | 16 ++++++++-------- charon/pkgs/indexing.py | 5 ++--- charon/utils/files.py | 3 +-- 3 files changed, 11 insertions(+), 13 deletions(-) diff --git a/charon/pkgs/checksum_http.py b/charon/pkgs/checksum_http.py index e57dab34..e30a373e 100644 --- a/charon/pkgs/checksum_http.py +++ b/charon/pkgs/checksum_http.py @@ -13,7 +13,7 @@ See the License for the specific language governing permissions and limitations under the License. 
""" -from charon.utils.files import digest, HashType +from charon.utils.files import digest, HashType, overwrite_file from charon.storage import S3Client from typing import Tuple, List, Dict, Optional from html.parser import HTMLParser @@ -169,9 +169,10 @@ def _check_and_remove_file(file_name: str): def _write_one_col_file(items: List[str], file_name: str): if items and len(items) > 0: _check_and_remove_file(file_name) - with open(file_name, "w") as f: - for i in items: - f.write(i + "\n") + content = "" + for i in items: + content = content + i + "\n" + overwrite_file(file_name, content) logger.info("The report file %s is generated.", file_name) _write_one_col_file(content[0], os.path.join(work_dir, "mismatched_files.csv")) @@ -180,10 +181,9 @@ def _write_one_col_file(items: List[str], file_name: str): if content[2] and len(content[2]) > 0: error_file = os.path.join(work_dir, "error_files.csv") _check_and_remove_file(error_file) - with open(error_file, "w") as f: - f.write("path,error\n") - for d in content[2]: - f.write("{path},{error}\n".format(path=d["path"], error=d["error"])) + f_content_lines: List[str] = [] + f_content = "path,error\n" + "\n".join(f_content_lines) + overwrite_file(error_file, f_content) logger.info("The report file %s is generated.", error_file) diff --git a/charon/pkgs/indexing.py b/charon/pkgs/indexing.py index 4710cdab..d0e70638 100644 --- a/charon/pkgs/indexing.py +++ b/charon/pkgs/indexing.py @@ -19,7 +19,7 @@ # from charon.pkgs.pkg_utils import invalidate_cf_paths from charon.constants import (INDEX_HTML_TEMPLATE, NPM_INDEX_HTML_TEMPLATE, PACKAGE_TYPE_MAVEN, PACKAGE_TYPE_NPM, PROD_INFO_SUFFIX) -from charon.utils.files import digest_content +from charon.utils.files import digest_content, overwrite_file from jinja2 import Template import os import logging @@ -155,8 +155,7 @@ def __to_html(package_type: str, contents: List[str], folder: str, top_level: st if folder == "/": html_path = os.path.join(top_level, "index.html") 
os.makedirs(os.path.dirname(html_path), exist_ok=True) - with open(html_path, 'w', encoding='utf-8') as html: - html.write(html_content) + overwrite_file(html_path, html_content) return html_path diff --git a/charon/utils/files.py b/charon/utils/files.py index d811200b..ccad3e23 100644 --- a/charon/utils/files.py +++ b/charon/utils/files.py @@ -125,6 +125,5 @@ def write_manifest(paths: List[str], root: str, product_key: str) -> Tuple[str, if not os.path.isfile(manifest_path): with open(manifest_path, mode="a", encoding="utf-8"): pass - with open(manifest_path, mode="w", encoding="utf-8") as f: - f.write('\n'.join(artifacts)) + overwrite_file(manifest_path, '\n'.join(artifacts)) return manifest_name, manifest_path From ecb572491b2524c35059b29ffae064523e035e2a Mon Sep 17 00:00:00 2001 From: Gang Li Date: Wed, 9 Jul 2025 10:12:54 +0800 Subject: [PATCH 13/23] Feat: support recursive indexing for index function --- charon/cmd/cmd_index.py | 18 +++++++++++++++++- charon/pkgs/indexing.py | 24 +++++++++++++++++++----- 2 files changed, 36 insertions(+), 6 deletions(-) diff --git a/charon/cmd/cmd_index.py b/charon/cmd/cmd_index.py index e5dd11a5..7d4c07a6 100644 --- a/charon/cmd/cmd_index.py +++ b/charon/cmd/cmd_index.py @@ -42,6 +42,13 @@ """, required=True ) +@option( + "--recursive", + "-r", + help="If do indexing recursively under $path", + is_flag=True, + default=False +) @option( "--config", "-c", @@ -69,6 +76,7 @@ def index( path: str, target: str, + recursive: bool = False, config: str = None, debug: bool = False, quiet: bool = False, @@ -120,7 +128,15 @@ def index( if not aws_bucket: logger.error("No bucket specified for target %s!", target) else: - re_index(b, path, package_type, aws_profile, dryrun) + args = { + "target": b, + "path": path, + "package_type": package_type, + "aws_profile": aws_profile, + "recursive": recursive, + "dry_run": dryrun + } + re_index(**args) # type: ignore except Exception: print(traceback.format_exc()) diff --git 
a/charon/pkgs/indexing.py b/charon/pkgs/indexing.py index d0e70638..6794a478 100644 --- a/charon/pkgs/indexing.py +++ b/charon/pkgs/indexing.py @@ -266,7 +266,7 @@ def re_index( path: str, package_type: str, aws_profile: str = None, - # cf_enable: bool = False, + recursive: bool = False, dry_run: bool = False ): """Refresh the index.html for the specified folder in the bucket. @@ -306,6 +306,7 @@ def re_index( logger.debug("The re-indexed page content: %s", index_content) if not dry_run: index_path = os.path.join(path, "index.html") + logger.info("Start re-indexing %s in bucket %s", index_path, bucket_name) if path == "/": index_path = "index.html" s3_client.simple_delete_file(index_path, (bucket_name, real_prefix)) @@ -313,10 +314,23 @@ def re_index( index_path, index_content, (bucket_name, real_prefix), "text/html", digest_content(index_content) ) - # We will not invalidate index.html per cost consideration - # if cf_enable: - # cf_client = CFClient(aws_profile=aws_profile) - # invalidate_cf_paths(cf_client, bucket, [index_path]) + logger.info("%s re-indexing finished", index_path) + if recursive: + for c in contents: + if c.endswith("/"): + sub_path = c.removeprefix(real_prefix).strip() + if sub_path.startswith("/"): + sub_path = sub_path.removeprefix("/") + logger.debug("subpath: %s", sub_path) + args = { + "target": target, + "path": sub_path, + "package_type": package_type, + "aws_profile": aws_profile, + "recursive": recursive, + "dry_run": dry_run + } + re_index(**args) # type: ignore else: logger.warning( "The path %s does not contain any contents in bucket %s. 
" From cc52352e16335a6c1349610b9c0a18ceb00212b5 Mon Sep 17 00:00:00 2001 From: Gang Li Date: Fri, 19 Sep 2025 10:09:22 +0800 Subject: [PATCH 14/23] Fix a issue of pyproject.toml file --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 8e211380..7fa24200 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -10,7 +10,7 @@ authors = [ ] readme = "README.md" keywords = ["charon", "mrrc", "maven", "npm", "build", "java"] -license-files = ["LICENSE"] +license = {file="LICENSE"} requires-python = ">=3.9" classifiers = [ "Development Status :: 1 - Planning", From 308f53aadaa95f92cbf2cea3dae6066e8a62d53d Mon Sep 17 00:00:00 2001 From: Gang Li Date: Fri, 19 Sep 2025 11:28:06 +0800 Subject: [PATCH 15/23] Some fix for setup tools * Add MANIFEST.in for sdist generation * Fix some warning for sdist build --- MANIFEST.in | 8 ++++++++ pyproject.toml | 7 +++---- setup.py | 29 +++++++++++++---------------- 3 files changed, 24 insertions(+), 20 deletions(-) create mode 100644 MANIFEST.in diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 00000000..6a2a3c77 --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1,8 @@ + include LICENSE + include README.md + include pyproject.toml + include setup.py + recursive-include charon *.py *.json + recursive-include tests *.py *.txt *.tgz *.zip *.json *.sha1 + exclude .github .gitignore + diff --git a/pyproject.toml b/pyproject.toml index 7fa24200..2d2764e1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -10,12 +10,11 @@ authors = [ ] readme = "README.md" keywords = ["charon", "mrrc", "maven", "npm", "build", "java"] -license = {file="LICENSE"} +license = "Apache-2.0" requires-python = ">=3.9" classifiers = [ "Development Status :: 1 - Planning", "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", "Topic :: Software Development :: Build Tools", "Topic :: Utilities", "Programming Language :: Python :: 3 :: Only", @@ -61,8 +60,8 @@ 
test = [ [project.scripts] charon = "charon.cmd:cli" -[tool.setuptools] -packages = ["charon"] +[tool.setuptools.packages.find] +include = ["charon*"] [tool.setuptools_scm] fallback_version = "1.3.4+dev.fallback" diff --git a/setup.py b/setup.py index 934ae861..5c37fc84 100755 --- a/setup.py +++ b/setup.py @@ -32,31 +32,28 @@ classifiers=[ "Development Status :: 1 - Planning", "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", "Programming Language :: Python :: 3", "Topic :: Software Development :: Build Tools", "Topic :: Utilities", ], keywords="charon mrrc maven npm build java", author="RedHat EXD SPMM", - license="APLv2", packages=find_packages(exclude=["ez_setup", "examples", "tests"]), package_data={'charon': ['schemas/*.json']}, - test_suite="tests", entry_points={ "console_scripts": ["charon = charon.cmd:cli"], }, - install_requires=[ - "Jinja2>=3.1.3", - "boto3>=1.18.35", - "botocore>=1.21.35", - "click>=8.1.3", - "requests>=2.25.0", - "PyYAML>=5.4.1", - "defusedxml>=0.7.1", - "subresource-integrity>=0.2", - "jsonschema>=4.9.1", - "urllib3>=1.25.10", - "semantic-version>=2.10.0" - ], + # install_requires=[ + # "Jinja2>=3.1.3", + # "boto3>=1.18.35", + # "botocore>=1.21.35", + # "click>=8.1.3", + # "requests>=2.25.0", + # "PyYAML>=5.4.1", + # "defusedxml>=0.7.1", + # "subresource-integrity>=0.2", + # "jsonschema>=4.9.1", + # "urllib3>=1.25.10", + # "semantic-version>=2.10.0" + # ], ) From 00e7e19d2a2de0dcacee2a68e93cf3a96f89d286 Mon Sep 17 00:00:00 2001 From: Gang Li Date: Fri, 19 Sep 2025 12:50:28 +0800 Subject: [PATCH 16/23] Fix pyproject.toml license issue --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 2d2764e1..cd439657 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -10,7 +10,7 @@ authors = [ ] readme = "README.md" keywords = ["charon", "mrrc", "maven", "npm", "build", "java"] -license = "Apache-2.0" +license = {text="Apache-2.0"} 
requires-python = ">=3.9" classifiers = [ "Development Status :: 1 - Planning", From 29594e57b7d9b31df6757af896b5db8eaeb6415b Mon Sep 17 00:00:00 2001 From: yma Date: Mon, 13 Oct 2025 15:50:03 +0800 Subject: [PATCH 17/23] Feat: Accept multiple maven zips with non-RADAS signing way --- charon/cmd/cmd_upload.py | 65 ++++++++------- charon/cmd/internal.py | 8 ++ charon/pkgs/maven.py | 140 ++++++++++++++++++++++++++++++++- charon/utils/archive.py | 13 +++ tests/test_archive.py | 33 +++++++- tests/test_extract_tarballs.py | 31 ++++++++ tests/test_maven_upload.py | 62 +++++++++++++++ 7 files changed, 320 insertions(+), 32 deletions(-) create mode 100644 tests/test_extract_tarballs.py diff --git a/charon/cmd/cmd_upload.py b/charon/cmd/cmd_upload.py index a867df01..dcc24a88 100644 --- a/charon/cmd/cmd_upload.py +++ b/charon/cmd/cmd_upload.py @@ -16,12 +16,12 @@ from typing import List from charon.config import get_config -from charon.utils.archive import detect_npm_archive, NpmArchiveType +from charon.utils.archive import detect_npm_archives, NpmArchiveType from charon.pkgs.maven import handle_maven_uploading from charon.pkgs.npm import handle_npm_uploading from charon.cmd.internal import ( _decide_mode, _validate_prod_key, - _get_local_repo, _get_targets, + _get_local_repos, _get_targets, _get_ignore_patterns, _safe_delete ) from click import command, option, argument @@ -35,8 +35,10 @@ @argument( - "repo", + "repos", type=str, + nargs='+', # This allows multiple arguments for zip urls + required=True ) @option( "--product", @@ -138,7 +140,7 @@ @option("--dryrun", "-n", is_flag=True, default=False) @command() def upload( - repo: str, + repos: List[str], product: str, version: str, targets: List[str], @@ -152,9 +154,9 @@ def upload( quiet=False, dryrun=False ): - """Upload all files from a released product REPO to Ronda - Service. The REPO points to a product released tarball which - is hosted in a remote url or a local path. 
+ """Upload all files from released product REPOs to Ronda + Service. The REPOs point to a product released tarballs which + are hosted in remote urls or local paths. """ tmp_dir = work_dir try: @@ -173,8 +175,8 @@ def upload( logger.error("No AWS profile specified!") sys.exit(1) - archive_path = _get_local_repo(repo) - npm_archive_type = detect_npm_archive(archive_path) + archive_paths = _get_local_repos(repos) + archive_types = detect_npm_archives(archive_paths) product_key = f"{product}-{version}" manifest_bucket_name = conf.get_manifest_bucket() targets_ = _get_targets(targets, conf) @@ -185,23 +187,10 @@ def upload( " are set correctly.", targets_ ) sys.exit(1) - if npm_archive_type != NpmArchiveType.NOT_NPM: - logger.info("This is a npm archive") - tmp_dir, succeeded = handle_npm_uploading( - archive_path, - product_key, - targets=targets_, - aws_profile=aws_profile, - dir_=work_dir, - gen_sign=contain_signature, - cf_enable=conf.is_aws_cf_enable(), - key=sign_key, - dry_run=dryrun, - manifest_bucket_name=manifest_bucket_name - ) - if not succeeded: - sys.exit(1) - else: + + maven_count = archive_types.count(NpmArchiveType.NOT_NPM) + npm_count = len(archive_types) - maven_count + if maven_count == len(archive_types): ignore_patterns_list = None if ignore_patterns: ignore_patterns_list = ignore_patterns @@ -209,7 +198,7 @@ def upload( ignore_patterns_list = _get_ignore_patterns(conf) logger.info("This is a maven archive") tmp_dir, succeeded = handle_maven_uploading( - archive_path, + archive_paths, product_key, ignore_patterns_list, root=root_path, @@ -225,6 +214,28 @@ def upload( ) if not succeeded: sys.exit(1) + elif npm_count == len(archive_types) and len(archive_types) == 1: + logger.info("This is a npm archive") + tmp_dir, succeeded = handle_npm_uploading( + archive_paths[0], + product_key, + targets=targets_, + aws_profile=aws_profile, + dir_=work_dir, + gen_sign=contain_signature, + cf_enable=conf.is_aws_cf_enable(), + key=sign_key, + dry_run=dryrun, + 
manifest_bucket_name=manifest_bucket_name + ) + if not succeeded: + sys.exit(1) + elif npm_count == len(archive_types) and len(archive_types) > 1: + logger.error("Doesn't support multiple upload for npm") + sys.exit(1) + else: + logger.error("Upload types are not consistent") + sys.exit(1) except Exception: print(traceback.format_exc()) sys.exit(2) # distinguish between exception and bad config or bad state diff --git a/charon/cmd/internal.py b/charon/cmd/internal.py index e7e7d14a..89d4ea1b 100644 --- a/charon/cmd/internal.py +++ b/charon/cmd/internal.py @@ -75,6 +75,14 @@ def _get_local_repo(url: str) -> str: return archive_path +def _get_local_repos(urls: list) -> list: + archive_paths = [] + for url in urls: + archive_path = _get_local_repo(url) + archive_paths.append(archive_path) + return archive_paths + + def _validate_prod_key(product: str, version: str) -> bool: if not product or product.strip() == "": logger.error("Error: product can not be empty!") diff --git a/charon/pkgs/maven.py b/charon/pkgs/maven.py index 9f50f35b..b183c474 100644 --- a/charon/pkgs/maven.py +++ b/charon/pkgs/maven.py @@ -32,11 +32,12 @@ META_FILE_FAILED, MAVEN_METADATA_TEMPLATE, ARCHETYPE_CATALOG_TEMPLATE, ARCHETYPE_CATALOG_FILENAME, PACKAGE_TYPE_MAVEN) -from typing import Dict, List, Tuple +from typing import Dict, List, Tuple, Union from jinja2 import Template from datetime import datetime from zipfile import ZipFile, BadZipFile from tempfile import mkdtemp +from shutil import rmtree, copy2 from defusedxml import ElementTree import os @@ -261,7 +262,7 @@ def __gen_digest_file(hash_file_path, meta_file_path: str, hashtype: HashType) - def handle_maven_uploading( - repo: str, + repos: Union[str, List[str]], prod_key: str, ignore_patterns=None, root="maven-repository", @@ -294,8 +295,10 @@ def handle_maven_uploading( """ if targets is None: targets = [] - # 1. 
extract tarball - tmp_root = _extract_tarball(repo, prod_key, dir__=dir_) + if isinstance(repos, str): + repos = [repos] + # 1. extract tarballs + tmp_root = _extract_tarballs(repos, root, prod_key, dir__=dir_) # 2. scan for paths and filter out the ignored paths, # and also collect poms for later metadata generation @@ -673,6 +676,135 @@ def _extract_tarball(repo: str, prefix="", dir__=None) -> str: sys.exit(1) +def _extract_tarballs(repos: List[str], root: str, prefix="", dir__=None) -> str: + """ Extract multiple zip archives to a temporary directory. + * repos are the list of repo paths to extract + * root is a prefix in the tarball to identify which path is + the beginning of the maven GAV path + * prefix is the prefix for temporary directory name + * dir__ is the directory where temporary directories will be created. + + Returns the path to the merged temporary directory containing all extracted files + """ + # Create final merge directory + final_tmp_root = mkdtemp(prefix=f"charon-{prefix}-final-", dir=dir__) + + total_copied = 0 + total_overwritten = 0 + total_processed = 0 + + # Collect all extracted directories first + extracted_dirs = [] + + for repo in repos: + if os.path.exists(repo): + try: + logger.info("Extracting tarball %s", repo) + repo_zip = ZipFile(repo) + tmp_root = mkdtemp(prefix=f"charon-{prefix}-", dir=dir__) + extract_zip_all(repo_zip, tmp_root) + extracted_dirs.append(tmp_root) + + except BadZipFile as e: + logger.error("Tarball extraction error: %s", e) + sys.exit(1) + else: + logger.error("Error: archive %s does not exist", repo) + sys.exit(1) + + # Merge all extracted directories + if extracted_dirs: + # Get top-level directory names for merged from all repos + top_level_merged_name_dirs = [] + for extracted_dir in extracted_dirs: + for item in os.listdir(extracted_dir): + item_path = os.path.join(extracted_dir, item) + # Check the root maven-repository subdirectory existence + maven_repo_path = os.path.join(item_path, root) + if 
os.path.isdir(item_path) and os.path.exists(maven_repo_path): + top_level_merged_name_dirs.append(item) + break + + # Create merged directory name + merged_dir_name = ( + "_".join(top_level_merged_name_dirs) if top_level_merged_name_dirs else "merged" + ) + merged_dest_dir = os.path.join(final_tmp_root, merged_dir_name) + + # Merge content from all extracted directories + for extracted_dir in extracted_dirs: + copied, overwritten, processed = _merge_directories_with_rename( + extracted_dir, merged_dest_dir, root + ) + total_copied += copied + total_overwritten += overwritten + total_processed += processed + + # Clean up temporary extraction directory + rmtree(extracted_dir) + + logger.info( + "All zips merged! Total copied: %s, Total overwritten: %s, Total processed: %s", + total_copied, + total_overwritten, + total_processed, + ) + return final_tmp_root + + +def _merge_directories_with_rename(src_dir: str, dest_dir: str, root: str): + """ Recursively copy files from src_dir to dest_dir, overwriting existing files. + * src_dir is the source directory to copy from + * dest_dir is the destination directory to copy to. + + Returns Tuple of (copied_count, overwritten_count, processed_count) + """ + copied_count = 0 + overwritten_count = 0 + processed_count = 0 + + # Find the actual content directory + content_root = src_dir + for item in os.listdir(src_dir): + item_path = os.path.join(src_dir, item) + # Check the root maven-repository subdirectory existence + maven_repo_path = os.path.join(item_path, root) + if os.path.isdir(item_path) and os.path.exists(maven_repo_path): + content_root = item_path + break + + # pylint: disable=unused-variable + for root_dir, dirs, files in os.walk(content_root): + # Calculate relative path from content root + rel_path = os.path.relpath(root_dir, content_root) + dest_root = os.path.join(dest_dir, rel_path) if rel_path != '.' 
else dest_dir + + # Create destination directory if it doesn't exist + os.makedirs(dest_root, exist_ok=True) + + # Copy all files, overwriting existing ones + for file in files: + src_file = os.path.join(root_dir, file) + dest_file = os.path.join(dest_root, file) + if os.path.exists(dest_file): + overwritten_count += 1 + logger.debug("Overwritten: %s -> %s", src_file, dest_file) + else: + copied_count += 1 + logger.debug("Copied: %s -> %s", src_file, dest_file) + + processed_count += 1 + copy2(src_file, dest_file) + + logger.info( + "One zip merged! Files copied: %s, Files overwritten: %s, Total files processed: %s", + copied_count, + overwritten_count, + processed_count, + ) + return copied_count, overwritten_count, processed_count + + def _scan_paths(files_root: str, ignore_patterns: List[str], root: str) -> Tuple[str, List[str], List[str], List[str]]: # 2. scan for paths and filter out the ignored paths, diff --git a/charon/utils/archive.py b/charon/utils/archive.py index 4a1f256c..058fa17e 100644 --- a/charon/utils/archive.py +++ b/charon/utils/archive.py @@ -182,6 +182,19 @@ def detect_npm_archive(repo): return NpmArchiveType.NOT_NPM +def detect_npm_archives(repos): + """Detects, if the archives need to have npm workflow. 
+ :parameter repos list of repository directories + :return list of NpmArchiveType values + """ + results = [] + for repo in repos: + result = detect_npm_archive(repo) + results.append(result) + + return results + + def download_archive(url: str, base_dir=None) -> str: dir_ = base_dir if not dir_ or not os.path.isdir(dir_): diff --git a/tests/test_archive.py b/tests/test_archive.py index 0e2ac09a..22cf48fd 100644 --- a/tests/test_archive.py +++ b/tests/test_archive.py @@ -1,5 +1,5 @@ from tests.base import BaseTest -from charon.utils.archive import NpmArchiveType, detect_npm_archive +from charon.utils.archive import NpmArchiveType, detect_npm_archive, detect_npm_archives import os from tests.constants import INPUTS @@ -12,5 +12,36 @@ def test_detect_package(self): npm_tarball = os.path.join(INPUTS, "code-frame-7.14.5.tgz") self.assertEqual(NpmArchiveType.TAR_FILE, detect_npm_archive(npm_tarball)) + def test_detect_packages(self): + mvn_tarballs = [ + os.path.join(INPUTS, "commons-client-4.5.6.zip"), + os.path.join(INPUTS, "commons-client-4.5.9.zip") + ] + archive_types = detect_npm_archives(mvn_tarballs) + self.assertEqual(2, archive_types.count(NpmArchiveType.NOT_NPM)) + + npm_tarball = [ + os.path.join(INPUTS, "code-frame-7.14.5.tgz") + ] + archive_types = detect_npm_archives(npm_tarball) + self.assertEqual(1, archive_types.count(NpmArchiveType.TAR_FILE)) + + npm_tarballs = [ + os.path.join(INPUTS, "code-frame-7.14.5.tgz"), + os.path.join(INPUTS, "code-frame-7.15.8.tgz") + ] + archive_types = detect_npm_archives(npm_tarballs) + self.assertEqual(2, archive_types.count(NpmArchiveType.TAR_FILE)) + + tarballs = [ + os.path.join(INPUTS, "commons-client-4.5.6.zip"), + os.path.join(INPUTS, "commons-client-4.5.9.zip"), + os.path.join(INPUTS, "code-frame-7.14.5.tgz"), + os.path.join(INPUTS, "code-frame-7.15.8.tgz") + ] + archive_types = detect_npm_archives(tarballs) + self.assertEqual(2, archive_types.count(NpmArchiveType.NOT_NPM)) + self.assertEqual(2, 
archive_types.count(NpmArchiveType.TAR_FILE)) + def test_download_archive(self): pass diff --git a/tests/test_extract_tarballs.py b/tests/test_extract_tarballs.py new file mode 100644 index 00000000..22190bfc --- /dev/null +++ b/tests/test_extract_tarballs.py @@ -0,0 +1,31 @@ +from tests.base import BaseTest +from charon.pkgs.maven import _extract_tarballs +import os + +from tests.constants import INPUTS + + +class ArchiveTest(BaseTest): + def test_extract_tarballs(self): + mvn_tarballs = [ + os.path.join(INPUTS, "commons-client-4.5.6.zip"), + os.path.join(INPUTS, "commons-client-4.5.9.zip"), + ] + final_merged_path = _extract_tarballs(mvn_tarballs, "maven-repository") + expected_dir = os.path.join( + final_merged_path, "commons-client-4.5.6_commons-client-4.5.9", "maven-repository" + ) + self.assertTrue(os.path.exists(expected_dir)) + + expected_files = [ + "org/apache/httpcomponents/httpclient/4.5.9/httpclient-4.5.9.jar", + "org/apache/httpcomponents/httpclient/4.5.9/httpclient-4.5.9.pom", + "org/apache/httpcomponents/httpclient/4.5.6/httpclient-4.5.6.jar", + "org/apache/httpcomponents/httpclient/4.5.6/httpclient-4.5.6.pom", + ] + for expected_file in expected_files: + file_path = os.path.join(expected_dir, expected_file) + self.assertTrue(os.path.exists(file_path)) + + def test_download_archive(self): + pass diff --git a/tests/test_maven_upload.py b/tests/test_maven_upload.py index 629a9e3f..6f40a8ca 100644 --- a/tests/test_maven_upload.py +++ b/tests/test_maven_upload.py @@ -110,6 +110,68 @@ def test_overlap_upload(self): self.assertIn("httpclient", cat_content) self.assertIn("org.apache.httpcomponents", cat_content) + def test_multi_zips_upload(self): + mvn_tarballs = [ + os.path.join(INPUTS, "commons-client-4.5.6.zip"), + os.path.join(INPUTS, "commons-client-4.5.9.zip") + ] + product_45 = "commons-client-4.5" + + handle_maven_uploading( + mvn_tarballs, product_45, + targets=[('', TEST_BUCKET, '', '')], + dir_=self.tempdir, do_index=False + ) + + objs = 
list(self.test_bucket.objects.all()) + actual_files = [obj.key for obj in objs] + # need to double mvn num because of .prodinfo files + self.assertEqual( + COMMONS_CLIENT_MVN_NUM * 2 + COMMONS_CLIENT_META_NUM, + len(actual_files) + ) + + filesets = [ + COMMONS_CLIENT_METAS, COMMONS_CLIENT_456_FILES, + COMMONS_CLIENT_459_FILES, + ARCHETYPE_CATALOG_FILES + ] + for fileset in filesets: + for f in fileset: + self.assertIn(f, actual_files) + + product_mix = [product_45] + for f in COMMONS_LOGGING_FILES: + self.assertIn(f, actual_files) + self.check_product(f, product_mix) + for f in COMMONS_LOGGING_METAS: + self.assertIn(f, actual_files) + + meta_obj_client = self.test_bucket.Object(COMMONS_CLIENT_METAS[0]) + meta_content_client = str(meta_obj_client.get()["Body"].read(), "utf-8") + self.assertIn( + "org.apache.httpcomponents", meta_content_client + ) + self.assertIn("httpclient", meta_content_client) + self.assertIn("4.5.9", meta_content_client) + self.assertIn("4.5.9", meta_content_client) + self.assertIn("4.5.6", meta_content_client) + self.assertIn("4.5.9", meta_content_client) + + meta_obj_logging = self.test_bucket.Object(COMMONS_LOGGING_METAS[0]) + meta_content_logging = str(meta_obj_logging.get()["Body"].read(), "utf-8") + self.assertIn("commons-logging", meta_content_logging) + self.assertIn("commons-logging", meta_content_logging) + self.assertIn("1.2", meta_content_logging) + self.assertIn("1.2", meta_content_logging) + self.assertIn("1.2", meta_content_logging) + + catalog = self.test_bucket.Object(ARCHETYPE_CATALOG) + cat_content = str(catalog.get()["Body"].read(), "utf-8") + self.assertIn("4.5.9", cat_content) + self.assertIn("httpclient", cat_content) + self.assertIn("org.apache.httpcomponents", cat_content) + def test_ignore_upload(self): test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product_456 = "commons-client-4.5.6" From 6d4dc56a59101ca38a3bb52de57badb6fc586741 Mon Sep 17 00:00:00 2001 From: yma Date: Tue, 14 Oct 2025 14:15:14 +0800 
Subject: [PATCH 18/23] Fix TypeError for argument multi nargs value definition --- charon/cmd/cmd_upload.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/charon/cmd/cmd_upload.py b/charon/cmd/cmd_upload.py index dcc24a88..4cd0d8dd 100644 --- a/charon/cmd/cmd_upload.py +++ b/charon/cmd/cmd_upload.py @@ -37,8 +37,7 @@ @argument( "repos", type=str, - nargs='+', # This allows multiple arguments for zip urls - required=True + nargs=-1 # This allows multiple arguments for zip urls ) @option( "--product", From f3a2c55d9233895f39d330aae3120fde3a7e17cc Mon Sep 17 00:00:00 2001 From: yma Date: Fri, 17 Oct 2025 11:54:43 +0800 Subject: [PATCH 19/23] Fix note, err log, repos param list type, merged dir name length, typo issues --- charon/cmd/cmd_upload.py | 1 + charon/pkgs/maven.py | 24 +++++------------------- tests/test_cf_maven_ops.py | 4 ++-- tests/test_cf_reindex.py | 2 +- tests/test_extract_tarballs.py | 5 +---- tests/test_manifest_del.py | 2 +- tests/test_manifest_upload.py | 2 +- tests/test_maven_del.py | 4 ++-- tests/test_maven_del_multi_tgts.py | 4 ++-- tests/test_maven_index.py | 14 +++++++------- tests/test_maven_index_multi_tgts.py | 12 ++++++------ tests/test_maven_sign.py | 6 +++--- tests/test_maven_upload.py | 8 ++++---- tests/test_maven_upload_multi_tgts.py | 8 ++++---- tests/test_pkgs_dryrun.py | 6 +++--- 15 files changed, 43 insertions(+), 59 deletions(-) diff --git a/charon/cmd/cmd_upload.py b/charon/cmd/cmd_upload.py index 4cd0d8dd..f1e4df3f 100644 --- a/charon/cmd/cmd_upload.py +++ b/charon/cmd/cmd_upload.py @@ -156,6 +156,7 @@ def upload( """Upload all files from released product REPOs to Ronda Service. The REPOs point to a product released tarballs which are hosted in remote urls or local paths. 
+ Notes: It does not support multiple repos for NPM archives """ tmp_dir = work_dir try: diff --git a/charon/pkgs/maven.py b/charon/pkgs/maven.py index b183c474..90724050 100644 --- a/charon/pkgs/maven.py +++ b/charon/pkgs/maven.py @@ -32,7 +32,7 @@ META_FILE_FAILED, MAVEN_METADATA_TEMPLATE, ARCHETYPE_CATALOG_TEMPLATE, ARCHETYPE_CATALOG_FILENAME, PACKAGE_TYPE_MAVEN) -from typing import Dict, List, Tuple, Union +from typing import Dict, List, Tuple from jinja2 import Template from datetime import datetime from zipfile import ZipFile, BadZipFile @@ -262,7 +262,7 @@ def __gen_digest_file(hash_file_path, meta_file_path: str, hashtype: HashType) - def handle_maven_uploading( - repos: Union[str, List[str]], + repos: List[str], prod_key: str, ignore_patterns=None, root="maven-repository", @@ -295,8 +295,7 @@ def handle_maven_uploading( """ if targets is None: targets = [] - if isinstance(repos, str): - repos = [repos] + # 1. extract tarballs tmp_root = _extract_tarballs(repos, root, prod_key, dir__=dir_) @@ -706,7 +705,7 @@ def _extract_tarballs(repos: List[str], root: str, prefix="", dir__=None) -> str extracted_dirs.append(tmp_root) except BadZipFile as e: - logger.error("Tarball extraction error: %s", e) + logger.error("Tarball extraction error for repo %s: %s", repo, e) sys.exit(1) else: logger.error("Error: archive %s does not exist", repo) @@ -714,21 +713,8 @@ def _extract_tarballs(repos: List[str], root: str, prefix="", dir__=None) -> str # Merge all extracted directories if extracted_dirs: - # Get top-level directory names for merged from all repos - top_level_merged_name_dirs = [] - for extracted_dir in extracted_dirs: - for item in os.listdir(extracted_dir): - item_path = os.path.join(extracted_dir, item) - # Check the root maven-repository subdirectory existence - maven_repo_path = os.path.join(item_path, root) - if os.path.isdir(item_path) and os.path.exists(maven_repo_path): - top_level_merged_name_dirs.append(item) - break - # Create merged directory name - 
merged_dir_name = ( - "_".join(top_level_merged_name_dirs) if top_level_merged_name_dirs else "merged" - ) + merged_dir_name = "merged_repositories" merged_dest_dir = os.path.join(final_tmp_root, merged_dir_name) # Merge content from all extracted directories diff --git a/tests/test_cf_maven_ops.py b/tests/test_cf_maven_ops.py index b8cb03c1..ca5ac361 100644 --- a/tests/test_cf_maven_ops.py +++ b/tests/test_cf_maven_ops.py @@ -31,7 +31,7 @@ def test_cf_after_upload(self): test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product = "commons-client-4.5.6" handle_maven_uploading( - test_zip, product, + [test_zip], product, targets=[('', TEST_BUCKET, 'ga', '', 'maven.repository.redhat.com')], dir_=self.tempdir, do_index=True, @@ -52,7 +52,7 @@ def test_cf_after_del(self): test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product_456 = "commons-client-4.5.6" handle_maven_uploading( - test_zip, product_456, + [test_zip], product_456, targets=[('', TEST_BUCKET, 'ga', '', 'maven.repository.redhat.com')], dir_=self.tempdir, do_index=True diff --git a/tests/test_cf_reindex.py b/tests/test_cf_reindex.py index 944a86f2..941793fd 100644 --- a/tests/test_cf_reindex.py +++ b/tests/test_cf_reindex.py @@ -40,7 +40,7 @@ def test_cf_maven_after_reindex(self): test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product_456 = "commons-client-4.5.6" handle_maven_uploading( - test_zip, product_456, + [test_zip], product_456, targets=[('', TEST_BUCKET, 'ga', '', 'maven.repository.redhat.com')], dir_=self.tempdir ) diff --git a/tests/test_extract_tarballs.py b/tests/test_extract_tarballs.py index 22190bfc..53a96f63 100644 --- a/tests/test_extract_tarballs.py +++ b/tests/test_extract_tarballs.py @@ -13,7 +13,7 @@ def test_extract_tarballs(self): ] final_merged_path = _extract_tarballs(mvn_tarballs, "maven-repository") expected_dir = os.path.join( - final_merged_path, "commons-client-4.5.6_commons-client-4.5.9", "maven-repository" + final_merged_path, 
"merged_repositories", "maven-repository" ) self.assertTrue(os.path.exists(expected_dir)) @@ -26,6 +26,3 @@ def test_extract_tarballs(self): for expected_file in expected_files: file_path = os.path.join(expected_dir, expected_file) self.assertTrue(os.path.exists(file_path)) - - def test_download_archive(self): - pass diff --git a/tests/test_manifest_del.py b/tests/test_manifest_del.py index 7a81be3c..c47c7602 100644 --- a/tests/test_manifest_del.py +++ b/tests/test_manifest_del.py @@ -77,7 +77,7 @@ def __prepare_maven_content(self): test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product = "commons-client-4.5.6" handle_maven_uploading( - test_zip, product, + [test_zip], product, targets=[(TEST_TARGET, TEST_BUCKET, '', '')], dir_=self.tempdir, do_index=False, diff --git a/tests/test_manifest_upload.py b/tests/test_manifest_upload.py index c7e801b2..520f0679 100644 --- a/tests/test_manifest_upload.py +++ b/tests/test_manifest_upload.py @@ -36,7 +36,7 @@ def test_maven_manifest_upload(self): test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product = "commons-client-4.5.6" handle_maven_uploading( - test_zip, product, + [test_zip], product, targets=[(TEST_TARGET, TEST_BUCKET, '', '')], dir_=self.tempdir, do_index=False, diff --git a/tests/test_maven_del.py b/tests/test_maven_del.py index 5b565adc..86425724 100644 --- a/tests/test_maven_del.py +++ b/tests/test_maven_del.py @@ -190,7 +190,7 @@ def __prepare_content(self, prefix=None): test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product_456 = "commons-client-4.5.6" handle_maven_uploading( - test_zip, product_456, + [test_zip], product_456, targets=[('', TEST_BUCKET, prefix, '')], dir_=self.tempdir, do_index=False @@ -199,7 +199,7 @@ def __prepare_content(self, prefix=None): test_zip = os.path.join(INPUTS, "commons-client-4.5.9.zip") product_459 = "commons-client-4.5.9" handle_maven_uploading( - test_zip, product_459, + [test_zip], product_459, targets=[('', TEST_BUCKET, prefix, '')], 
dir_=self.tempdir, do_index=False diff --git a/tests/test_maven_del_multi_tgts.py b/tests/test_maven_del_multi_tgts.py index 26fa11cc..2a7d042f 100644 --- a/tests/test_maven_del_multi_tgts.py +++ b/tests/test_maven_del_multi_tgts.py @@ -259,7 +259,7 @@ def __prepare_content(self, prefix=None): product_456 = "commons-client-4.5.6" targets_ = [('', TEST_BUCKET, prefix, ''), ('', TEST_BUCKET_2, prefix, '')] handle_maven_uploading( - test_zip, product_456, + [test_zip], product_456, targets=targets_, dir_=self.tempdir, do_index=False @@ -268,7 +268,7 @@ def __prepare_content(self, prefix=None): test_zip = os.path.join(INPUTS, "commons-client-4.5.9.zip") product_459 = "commons-client-4.5.9" handle_maven_uploading( - test_zip, product_459, + [test_zip], product_459, targets=targets_, dir_=self.tempdir, do_index=False diff --git a/tests/test_maven_index.py b/tests/test_maven_index.py index a5cd1ed2..33533337 100644 --- a/tests/test_maven_index.py +++ b/tests/test_maven_index.py @@ -37,7 +37,7 @@ def test_uploading_index(self): test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product = "commons-client-4.5.6" handle_maven_uploading( - test_zip, product, + [test_zip], product, targets=[('', TEST_BUCKET, '', '')], dir_=self.tempdir ) @@ -79,7 +79,7 @@ def test_overlap_upload_index(self): test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product_456 = "commons-client-4.5.6" handle_maven_uploading( - test_zip, product_456, + [test_zip], product_456, targets=[('', TEST_BUCKET, '', '')], dir_=self.tempdir ) @@ -87,7 +87,7 @@ def test_overlap_upload_index(self): test_zip = os.path.join(INPUTS, "commons-client-4.5.9.zip") product_459 = "commons-client-4.5.9" handle_maven_uploading( - test_zip, product_459, + [test_zip], product_459, targets=[('', TEST_BUCKET, '', '')], dir_=self.tempdir ) @@ -130,7 +130,7 @@ def test_re_index(self): test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product = "commons-client-4.5.6" handle_maven_uploading( - test_zip, 
product, + [test_zip], product, targets=[('', TEST_BUCKET, '', '')], dir_=self.tempdir ) @@ -221,7 +221,7 @@ def __test_upload_index_with_prefix(self, prefix: str): test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product = "commons-client-4.5.6" handle_maven_uploading( - test_zip, product, + [test_zip], product, targets=[('', TEST_BUCKET, prefix, '')], dir_=self.tempdir ) @@ -403,7 +403,7 @@ def __prepare_content(self, prefix=None): test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product_456 = "commons-client-4.5.6" handle_maven_uploading( - test_zip, product_456, + [test_zip], product_456, targets=[('', TEST_BUCKET, prefix, '')], dir_=self.tempdir ) @@ -411,7 +411,7 @@ def __prepare_content(self, prefix=None): test_zip = os.path.join(INPUTS, "commons-client-4.5.9.zip") product_459 = "commons-client-4.5.9" handle_maven_uploading( - test_zip, product_459, + [test_zip], product_459, targets=[('', TEST_BUCKET, prefix, '')], dir_=self.tempdir ) diff --git a/tests/test_maven_index_multi_tgts.py b/tests/test_maven_index_multi_tgts.py index cc9d0718..44f921bf 100644 --- a/tests/test_maven_index_multi_tgts.py +++ b/tests/test_maven_index_multi_tgts.py @@ -46,7 +46,7 @@ def test_uploading_index(self): test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product = "commons-client-4.5.6" handle_maven_uploading( - test_zip, product, + [test_zip], product, targets=targets_, dir_=self.tempdir ) @@ -106,7 +106,7 @@ def test_overlap_upload_index(self): test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product_456 = "commons-client-4.5.6" handle_maven_uploading( - test_zip, product_456, + [test_zip], product_456, targets=targets_, dir_=self.tempdir ) @@ -114,7 +114,7 @@ def test_overlap_upload_index(self): test_zip = os.path.join(INPUTS, "commons-client-4.5.9.zip") product_459 = "commons-client-4.5.9" handle_maven_uploading( - test_zip, product_459, + [test_zip], product_459, targets=targets_, dir_=self.tempdir ) @@ -194,7 +194,7 @@ def 
__test_upload_index_with_prefix(self, prefix: str): test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product = "commons-client-4.5.6" handle_maven_uploading( - test_zip, product, + [test_zip], product, targets=targets_, dir_=self.tempdir ) @@ -417,7 +417,7 @@ def __prepare_content(self, prefix=None): test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product_456 = "commons-client-4.5.6" handle_maven_uploading( - test_zip, product_456, + [test_zip], product_456, targets=targets_, dir_=self.tempdir ) @@ -425,7 +425,7 @@ def __prepare_content(self, prefix=None): test_zip = os.path.join(INPUTS, "commons-client-4.5.9.zip") product_459 = "commons-client-4.5.9" handle_maven_uploading( - test_zip, product_459, + [test_zip], product_459, targets=targets_, dir_=self.tempdir ) diff --git a/tests/test_maven_sign.py b/tests/test_maven_sign.py index f60ee54d..834326bf 100644 --- a/tests/test_maven_sign.py +++ b/tests/test_maven_sign.py @@ -32,7 +32,7 @@ def test_uploading_sign(self): test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product = "commons-client-4.5.6" handle_maven_uploading( - test_zip, product, + [test_zip], product, targets=[('', TEST_BUCKET, '', '')], dir_=self.tempdir, gen_sign=True, @@ -63,7 +63,7 @@ def test_overlap_upload_index(self): test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product_456 = "commons-client-4.5.6" handle_maven_uploading( - test_zip, product_456, + [test_zip], product_456, targets=[('', TEST_BUCKET, '', '')], dir_=self.tempdir, gen_sign=True, @@ -73,7 +73,7 @@ def test_overlap_upload_index(self): test_zip = os.path.join(INPUTS, "commons-client-4.5.9.zip") product_459 = "commons-client-4.5.9" handle_maven_uploading( - test_zip, product_459, + [test_zip], product_459, targets=[('', TEST_BUCKET, '', '')], dir_=self.tempdir, gen_sign=True, diff --git a/tests/test_maven_upload.py b/tests/test_maven_upload.py index 6f40a8ca..fefa74ea 100644 --- a/tests/test_maven_upload.py +++ 
b/tests/test_maven_upload.py @@ -47,7 +47,7 @@ def test_overlap_upload(self): test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product_456 = "commons-client-4.5.6" handle_maven_uploading( - test_zip, product_456, + [test_zip], product_456, targets=[('', TEST_BUCKET, '', '')], dir_=self.tempdir, do_index=False ) @@ -55,7 +55,7 @@ def test_overlap_upload(self): test_zip = os.path.join(INPUTS, "commons-client-4.5.9.zip") product_459 = "commons-client-4.5.9" handle_maven_uploading( - test_zip, product_459, + [test_zip], product_459, targets=[('', TEST_BUCKET, '', '')], dir_=self.tempdir, do_index=False ) @@ -176,7 +176,7 @@ def test_ignore_upload(self): test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product_456 = "commons-client-4.5.6" handle_maven_uploading( - test_zip, product_456, [".*.sha1"], + [test_zip], product_456, [".*.sha1"], targets=[('', TEST_BUCKET, '', '')], dir_=self.tempdir, do_index=False ) @@ -205,7 +205,7 @@ def __test_prefix_upload(self, prefix: str): test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product = "commons-client-4.5.6" handle_maven_uploading( - test_zip, product, + [test_zip], product, targets=[('', TEST_BUCKET, prefix, '')], dir_=self.tempdir, do_index=False diff --git a/tests/test_maven_upload_multi_tgts.py b/tests/test_maven_upload_multi_tgts.py index 35aa49d4..f6eb289e 100644 --- a/tests/test_maven_upload_multi_tgts.py +++ b/tests/test_maven_upload_multi_tgts.py @@ -68,7 +68,7 @@ def test_overlap_upload(self): ('', TEST_BUCKET, '', ''), ('', TEST_BUCKET_2, '', '') ] handle_maven_uploading( - test_zip, product_456, + [test_zip], product_456, targets=targets_, dir_=self.tempdir, do_index=False ) @@ -76,7 +76,7 @@ def test_overlap_upload(self): test_zip = os.path.join(INPUTS, "commons-client-4.5.9.zip") product_459 = "commons-client-4.5.9" handle_maven_uploading( - test_zip, product_459, + [test_zip], product_459, targets=targets_, dir_=self.tempdir, do_index=False ) @@ -186,7 +186,7 @@ def 
test_ignore_upload(self): ('', TEST_BUCKET, '', ''), ('', TEST_BUCKET_2, '', '') ] handle_maven_uploading( - test_zip, product_456, [".*.sha1"], + [test_zip], product_456, [".*.sha1"], targets=targets_, dir_=self.tempdir, do_index=False ) @@ -221,7 +221,7 @@ def __test_prefix_upload(self, targets: List[Tuple[str, str, str, str]]): test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product = "commons-client-4.5.6" handle_maven_uploading( - test_zip, product, + [test_zip], product, targets=targets, dir_=self.tempdir, do_index=False diff --git a/tests/test_pkgs_dryrun.py b/tests/test_pkgs_dryrun.py index 46061734..c49ad14d 100644 --- a/tests/test_pkgs_dryrun.py +++ b/tests/test_pkgs_dryrun.py @@ -30,7 +30,7 @@ def test_maven_upload_dry_run(self): test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product = "commons-client-4.5.6" handle_maven_uploading( - test_zip, product, + [test_zip], product, targets=[('', TEST_BUCKET, '', '')], dir_=self.tempdir, dry_run=True @@ -90,7 +90,7 @@ def __prepare_maven_content(self): test_zip = os.path.join(INPUTS, "commons-client-4.5.6.zip") product_456 = "commons-client-4.5.6" handle_maven_uploading( - test_zip, product_456, + [test_zip], product_456, targets=[('', TEST_BUCKET, '', '')], dir_=self.tempdir ) @@ -98,7 +98,7 @@ def __prepare_maven_content(self): test_zip = os.path.join(INPUTS, "commons-client-4.5.9.zip") product_459 = "commons-client-4.5.9" handle_maven_uploading( - test_zip, product_459, + [test_zip], product_459, targets=[('', TEST_BUCKET, '', '')], dir_=self.tempdir ) From 45d3210bfa84849701fd3edb7e977f78c90e8e8d Mon Sep 17 00:00:00 2001 From: yma Date: Wed, 22 Oct 2025 10:27:35 +0800 Subject: [PATCH 20/23] Fix files duplicated logic for the merge overlapping case --- charon/pkgs/maven.py | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/charon/pkgs/maven.py b/charon/pkgs/maven.py index 90724050..6ac406c5 100644 --- a/charon/pkgs/maven.py +++ 
b/charon/pkgs/maven.py @@ -689,7 +689,7 @@ def _extract_tarballs(repos: List[str], root: str, prefix="", dir__=None) -> str final_tmp_root = mkdtemp(prefix=f"charon-{prefix}-final-", dir=dir__) total_copied = 0 - total_overwritten = 0 + total_duplicated = 0 total_processed = 0 # Collect all extracted directories first @@ -719,20 +719,20 @@ def _extract_tarballs(repos: List[str], root: str, prefix="", dir__=None) -> str # Merge content from all extracted directories for extracted_dir in extracted_dirs: - copied, overwritten, processed = _merge_directories_with_rename( + copied, duplicated, processed = _merge_directories_with_rename( extracted_dir, merged_dest_dir, root ) total_copied += copied - total_overwritten += overwritten + total_duplicated += duplicated total_processed += processed # Clean up temporary extraction directory rmtree(extracted_dir) logger.info( - "All zips merged! Total copied: %s, Total overwritten: %s, Total processed: %s", + "All zips merged! Total copied: %s, Total duplicated: %s, Total processed: %s", total_copied, - total_overwritten, + total_duplicated, total_processed, ) return final_tmp_root @@ -743,10 +743,10 @@ def _merge_directories_with_rename(src_dir: str, dest_dir: str, root: str): * src_dir is the source directory to copy from * dest_dir is the destination directory to copy to. 
- Returns Tuple of (copied_count, overwritten_count, processed_count) + Returns Tuple of (copied_count, duplicated_count, processed_count) """ copied_count = 0 - overwritten_count = 0 + duplicated_count = 0 processed_count = 0 # Find the actual content directory @@ -768,27 +768,27 @@ def _merge_directories_with_rename(src_dir: str, dest_dir: str, root: str): # Create destination directory if it doesn't exist os.makedirs(dest_root, exist_ok=True) - # Copy all files, overwriting existing ones + # Copy all files, skip existing ones for file in files: src_file = os.path.join(root_dir, file) dest_file = os.path.join(dest_root, file) if os.path.exists(dest_file): - overwritten_count += 1 - logger.debug("Overwritten: %s -> %s", src_file, dest_file) + duplicated_count += 1 + logger.debug("Duplicated: %s, skipped", dest_file) else: copied_count += 1 + copy2(src_file, dest_file) logger.debug("Copied: %s -> %s", src_file, dest_file) processed_count += 1 - copy2(src_file, dest_file) logger.info( - "One zip merged! Files copied: %s, Files overwritten: %s, Total files processed: %s", + "One zip merged! 
Files copied: %s, Files duplicated: %s, Total files processed: %s", copied_count, - overwritten_count, + duplicated_count, processed_count, ) - return copied_count, overwritten_count, processed_count + return copied_count, duplicated_count, processed_count def _scan_paths(files_root: str, ignore_patterns: List[str], From cd7ed7a10cde676307e45ee7e8cbae0e8fe92131 Mon Sep 17 00:00:00 2001 From: yma Date: Wed, 22 Oct 2025 15:03:01 +0800 Subject: [PATCH 21/23] Add archetype catalog files merged logic for merged zips --- charon/pkgs/maven.py | 89 +++++++++++++++++++++++++++++++++++--- tests/test_maven_upload.py | 1 + 2 files changed, 84 insertions(+), 6 deletions(-) diff --git a/charon/pkgs/maven.py b/charon/pkgs/maven.py index 6ac406c5..1cf23041 100644 --- a/charon/pkgs/maven.py +++ b/charon/pkgs/maven.py @@ -690,6 +690,7 @@ def _extract_tarballs(repos: List[str], root: str, prefix="", dir__=None) -> str total_copied = 0 total_duplicated = 0 + total_merged = 0 total_processed = 0 # Collect all extracted directories first @@ -719,20 +720,23 @@ def _extract_tarballs(repos: List[str], root: str, prefix="", dir__=None) -> str # Merge content from all extracted directories for extracted_dir in extracted_dirs: - copied, duplicated, processed = _merge_directories_with_rename( + copied, duplicated, merged, processed = _merge_directories_with_rename( extracted_dir, merged_dest_dir, root ) total_copied += copied total_duplicated += duplicated + total_merged += merged total_processed += processed # Clean up temporary extraction directory rmtree(extracted_dir) logger.info( - "All zips merged! Total copied: %s, Total duplicated: %s, Total processed: %s", + "All zips merged! 
Total copied: %s, Total duplicated: %s, " + "Total merged: %s, Total processed: %s", total_copied, total_duplicated, + total_merged, total_processed, ) return final_tmp_root @@ -743,10 +747,11 @@ def _merge_directories_with_rename(src_dir: str, dest_dir: str, root: str): * src_dir is the source directory to copy from * dest_dir is the destination directory to copy to. - Returns Tuple of (copied_count, duplicated_count, processed_count) + Returns Tuple of (copied_count, duplicated_count, merged_count, processed_count) """ copied_count = 0 duplicated_count = 0 + merged_count = 0 processed_count = 0 # Find the actual content directory @@ -772,23 +777,95 @@ def _merge_directories_with_rename(src_dir: str, dest_dir: str, root: str): for file in files: src_file = os.path.join(root_dir, file) dest_file = os.path.join(dest_root, file) + + if file == ARCHETYPE_CATALOG_FILENAME: + _handle_archetype_catalog_merge(src_file, dest_file) + merged_count += 1 + logger.debug("Merged archetype catalog: %s -> %s", src_file, dest_file) if os.path.exists(dest_file): duplicated_count += 1 logger.debug("Duplicated: %s, skipped", dest_file) else: - copied_count += 1 copy2(src_file, dest_file) + copied_count += 1 logger.debug("Copied: %s -> %s", src_file, dest_file) processed_count += 1 logger.info( - "One zip merged! Files copied: %s, Files duplicated: %s, Total files processed: %s", + "One zip merged! Files copied: %s, Files duplicated: %s, " + "Files merged: %s, Total files processed: %s", copied_count, duplicated_count, + merged_count, processed_count, ) - return copied_count, duplicated_count, processed_count + return copied_count, duplicated_count, merged_count, processed_count + + +def _handle_archetype_catalog_merge(src_catalog: str, dest_catalog: str): + """ + Handle merging of archetype-catalog.xml files during directory merge. 
+ + Args: + src_catalog: Source archetype-catalog.xml file path + dest_catalog: Destination archetype-catalog.xml file path + """ + try: + with open(src_catalog, "rb") as sf: + src_archetypes = _parse_archetypes(sf.read()) + except ElementTree.ParseError as e: + logger.warning("Failed to read source archetype catalog %s: %s", src_catalog, e) + return + + if len(src_archetypes) < 1: + logger.warning( + "No archetypes found in source archetype-catalog.xml: %s, " + "even though the file exists! Skipping.", + src_catalog + ) + return + + # Copy directly if dest_catalog doesn't exist + if not os.path.exists(dest_catalog): + copy2(src_catalog, dest_catalog) + return + + try: + with open(dest_catalog, "rb") as df: + dest_archetypes = _parse_archetypes(df.read()) + except ElementTree.ParseError as e: + logger.warning("Failed to read dest archetype catalog %s: %s", dest_catalog, e) + return + + if len(dest_archetypes) < 1: + logger.warning( + "No archetypes found in dest archetype-catalog.xml: %s, " + "even though the file exists! 
Copy directly from the src_catalog, %s.", + dest_catalog, src_catalog + ) + copy2(src_catalog, dest_catalog) + return + + else: + original_dest_size = len(dest_archetypes) + for sa in src_archetypes: + if sa not in dest_archetypes: + dest_archetypes.append(sa) + else: + logger.debug("DUPLICATE ARCHETYPE: %s", sa) + + if len(dest_archetypes) != original_dest_size: + with open(dest_catalog, 'wb'): + content = MavenArchetypeCatalog(dest_archetypes).generate_meta_file_content() + try: + overwrite_file(dest_catalog, content) + except FileNotFoundError as e: + logger.error( + "Error: Can not create file %s because of some missing folders", + dest_catalog, + ) + raise e def _scan_paths(files_root: str, ignore_patterns: List[str], diff --git a/tests/test_maven_upload.py b/tests/test_maven_upload.py index fefa74ea..ab36c76f 100644 --- a/tests/test_maven_upload.py +++ b/tests/test_maven_upload.py @@ -168,6 +168,7 @@ def test_multi_zips_upload(self): catalog = self.test_bucket.Object(ARCHETYPE_CATALOG) cat_content = str(catalog.get()["Body"].read(), "utf-8") + self.assertIn("4.5.6", cat_content) self.assertIn("4.5.9", cat_content) self.assertIn("httpclient", cat_content) self.assertIn("org.apache.httpcomponents", cat_content) From 56c5a33b09ef47bf8c1aed2c13279c4ea5c4a187 Mon Sep 17 00:00:00 2001 From: yma Date: Thu, 23 Oct 2025 09:27:07 +0800 Subject: [PATCH 22/23] Fix unnecessary file open during archetype catalog merge --- charon/pkgs/maven.py | 16 ++++++---------- 1 file changed, 6 insertions(+), 10 deletions(-) diff --git a/charon/pkgs/maven.py b/charon/pkgs/maven.py index 1cf23041..25f8bc4f 100644 --- a/charon/pkgs/maven.py +++ b/charon/pkgs/maven.py @@ -856,16 +856,12 @@ def _handle_archetype_catalog_merge(src_catalog: str, dest_catalog: str): logger.debug("DUPLICATE ARCHETYPE: %s", sa) if len(dest_archetypes) != original_dest_size: - with open(dest_catalog, 'wb'): - content = MavenArchetypeCatalog(dest_archetypes).generate_meta_file_content() - try: - 
overwrite_file(dest_catalog, content) - except FileNotFoundError as e: - logger.error( - "Error: Can not create file %s because of some missing folders", - dest_catalog, - ) - raise e + content = MavenArchetypeCatalog(dest_archetypes).generate_meta_file_content() + try: + overwrite_file(dest_catalog, content) + except Exception as e: + logger.error("Failed to merge archetype catalog: %s", dest_catalog) + raise e def _scan_paths(files_root: str, ignore_patterns: List[str], From 9afe959ba70641d09465bf1434faa44d9510be3c Mon Sep 17 00:00:00 2001 From: Gang Li Date: Thu, 23 Oct 2025 14:23:22 +0800 Subject: [PATCH 23/23] Fix: remove two redundant open operations --- charon/pkgs/maven.py | 42 ++++++++++++++++++++---------------------- 1 file changed, 20 insertions(+), 22 deletions(-) diff --git a/charon/pkgs/maven.py b/charon/pkgs/maven.py index 25f8bc4f..02692e80 100644 --- a/charon/pkgs/maven.py +++ b/charon/pkgs/maven.py @@ -1032,17 +1032,16 @@ def _generate_rollback_archetype_catalog( else: # Re-render the result of our archetype un-merge to the # local file, in preparation for upload. - with open(local, 'wb') as f: - content = MavenArchetypeCatalog(remote_archetypes)\ - .generate_meta_file_content() - try: - overwrite_file(local, content) - except FileNotFoundError as e: - logger.error( - "Error: Can not create file %s because of some missing folders", - local, - ) - raise e + content = MavenArchetypeCatalog(remote_archetypes)\ + .generate_meta_file_content() + try: + overwrite_file(local, content) + except FileNotFoundError as e: + logger.error( + "Error: Can not create file %s because of some missing folders", + local, + ) + raise e __gen_all_digest_files(local) return 1 @@ -1148,17 +1147,16 @@ def _generate_upload_archetype_catalog( # Re-render the result of our archetype merge / # un-merge to the local file, in preparation for # upload. 
- with open(local, 'wb') as f: - content = MavenArchetypeCatalog(remote_archetypes)\ - .generate_meta_file_content() - try: - overwrite_file(local, content) - except FileNotFoundError as e: - logger.error( - "Error: Can not create file %s because of some missing folders", - local, - ) - raise e + content = MavenArchetypeCatalog(remote_archetypes)\ + .generate_meta_file_content() + try: + overwrite_file(local, content) + except FileNotFoundError as e: + logger.error( + "Error: Can not create file %s because of some missing folders", + local, + ) + raise e __gen_all_digest_files(local) return True