Merged
5 changes: 3 additions & 2 deletions charon/cmd/command.py
@@ -345,14 +345,15 @@ def delete(
     __safe_delete(tmp_dir)


-def __get_targets(target: List[str], conf: CharonConfig) -> List[Tuple[str, str, str]]:
+def __get_targets(target: List[str], conf: CharonConfig) -> List[Tuple[str, str, str, str]]:
     targets_ = []
     for tgt in target:
         aws_bucket = conf.get_aws_bucket(tgt)
         if not aws_bucket:
             continue
         prefix = conf.get_bucket_prefix(tgt)
-        targets_.append([tgt, aws_bucket, prefix])
+        registry = conf.get_bucket_registry(tgt)
+        targets_.append([tgt, aws_bucket, prefix, registry])
     if len(targets_) == 0:
         logger.error(
             "All targets are not valid or configured, "
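Note: each configured target now carries its registry as a fourth element. A hedged sketch of the resulting shape, using values from the test config added in tests/base.py below (the prefix value is assumed, since that config does not set one for the npm target):

    # Hypothetical illustration: each entry is [target, bucket, prefix, registry].
    __get_targets(["npm"], conf)
    # -> [["npm", "charon-test-npm", <prefix or empty>, "npm1.registry.redhat.com"]]
    # A target without a "registry" key falls back to "localhost" (DEFAULT_REGISTRY).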
18 changes: 16 additions & 2 deletions charon/config.py
@@ -20,6 +20,7 @@
 import logging

 from charon.utils.strings import remove_prefix
+from charon.constants import DEFAULT_REGISTRY

 CONFIG_FILE = "charon.yaml"

@@ -53,8 +54,8 @@ def get_aws_bucket(self, target: str) -> str:
             return None
         bucket = target_.get("bucket", None)
         if not bucket:
-            logger.error("The bucket %s is not found for target %s "
-                         "in charon configuration.")
+            logger.error("The bucket is not found for target %s "
+                         "in charon configuration.", target)
         return bucket

     def get_bucket_prefix(self, target: str) -> str:
@@ -73,6 +74,19 @@ def get_bucket_prefix(self, target: str) -> str:
             prefix = remove_prefix(prefix, "/")
         return prefix

+    def get_bucket_registry(self, target: str) -> str:
+        target_: Dict = self.__targets.get(target, None)
+        if not target_ or not isinstance(target_, Dict):
+            logger.error("The target %s is not found in charon configuration.", target)
+            return None
+        registry = target_.get("registry", None)
+        if not registry:
+            registry = DEFAULT_REGISTRY
+            logger.error("The registry is not found for target %s "
+                         "in charon configuration, so DEFAULT_REGISTRY(localhost) will be used.",
+                         target)
+        return registry
+
     def get_manifest_bucket(self) -> str:
         return self.__manifest_bucket

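Note: a quick sketch of the new lookup behavior, assuming get_config() is the module's existing charon.yaml loader (return values mirror the tests/base.py config below; these are hypothetical REPL results, not confirmed output):

    conf = get_config()                # assumed existing loader for charon.yaml
    conf.get_bucket_registry("npm")    # -> "npm1.registry.redhat.com" (explicit "registry" key)
    conf.get_bucket_registry("ea")     # -> "localhost" (no key: DEFAULT_REGISTRY, error logged)
    conf.get_bucket_registry("nope")   # -> None (unknown target)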
2 changes: 2 additions & 0 deletions charon/constants.py
@@ -173,3 +173,5 @@
 PROD_INFO_SUFFIX = ".prodinfo"
 MANIFEST_SUFFIX = ".txt"
 DEFAULT_ERRORS_LOG = "errors.log"
+
+DEFAULT_REGISTRY = "localhost"
4 changes: 2 additions & 2 deletions charon/pkgs/maven.py
@@ -256,7 +256,7 @@ def handle_maven_uploading(
         prod_key: str,
         ignore_patterns=None,
         root="maven-repository",
-        targets: List[Tuple[str, str, str]] = None,
+        targets: List[Tuple[str, str, str, str]] = None,
         aws_profile=None,
         dir_=None,
         do_index=True,
@@ -418,7 +418,7 @@ def handle_maven_del(
         prod_key: str,
         ignore_patterns=None,
         root="maven-repository",
-        targets: List[Tuple[str, str, str]] = None,
+        targets: List[Tuple[str, str, str, str]] = None,
         aws_profile=None,
         dir_=None,
         do_index=True,
89 changes: 46 additions & 43 deletions charon/pkgs/npm.py
@@ -30,6 +30,7 @@
 from charon.pkgs.pkg_utils import upload_post_process, rollback_post_process
 from charon.utils.strings import remove_prefix
 from charon.utils.files import write_manifest
+from charon.utils.map import del_none

 logger = logging.getLogger(__name__)

@@ -65,7 +66,7 @@ def __init__(self, metadata, is_version):
 def handle_npm_uploading(
         tarball_path: str,
         product: str,
-        targets: List[Tuple[str, str, str]] = None,
+        targets: List[Tuple[str, str, str, str]] = None,
         aws_profile=None,
         dir_=None,
         do_index=True,
@@ -86,48 +87,60 @@

     Returns the directory used for archive processing and if uploading is successful
     """
-    target_dir, valid_paths, package_metadata = _scan_metadata_paths_from_archive(
-        tarball_path, prod=product, dir__=dir_
-    )
-    if not os.path.isdir(target_dir):
-        logger.error("Error: the extracted target_dir path %s does not exist.", target_dir)
-        sys.exit(1)
-
-    valid_dirs = __get_path_tree(valid_paths, target_dir)
-
-    # main_target = targets[0]
     client = S3Client(aws_profile=aws_profile, dry_run=dry_run)
-    targets_ = [(target[1], remove_prefix(target[2], "/")) for target in targets]
-    logger.info(
-        "Start uploading files to s3 buckets: %s",
-        [target[1] for target in targets]
-    )
-    failed_files = client.upload_files(
-        file_paths=valid_paths,
-        targets=targets_,
-        product=product,
-        root=target_dir
-    )
-    logger.info("Files uploading done\n")
-
-    succeeded = True
+    for target in targets:
+        bucket_ = target[1]
+        prefix__ = remove_prefix(target[2], "/")
+        registry__ = target[3]
+        target_dir, valid_paths, package_metadata = _scan_metadata_paths_from_archive(
+            tarball_path, registry__, prod=product, dir__=dir_
+        )
+        if not os.path.isdir(target_dir):
+            logger.error("Error: the extracted target_dir path %s does not exist.", target_dir)
+            sys.exit(1)
+        valid_dirs = __get_path_tree(valid_paths, target_dir)
+
+        logger.info("Start uploading files to s3 buckets: %s", bucket_)
+        failed_files = client.upload_files(
+            file_paths=[valid_paths[0]],
+            targets=[(bucket_, prefix__)],
+            product=product,
+            root=target_dir
+        )
+        logger.info("Files uploading done\n")
+
+        succeeded = True
+
         if not manifest_bucket_name:
             logger.warning(
                 'Warning: No manifest bucket is provided, will ignore the process of manifest '
                 'uploading\n')
         else:
             logger.info("Start uploading manifest to s3 bucket %s", manifest_bucket_name)
-            manifest_folder = target[1]
+            manifest_folder = bucket_
             manifest_name, manifest_full_path = write_manifest(valid_paths, target_dir, product)

             client.upload_manifest(
                 manifest_name, manifest_full_path,
                 manifest_folder, manifest_bucket_name
             )
             logger.info("Manifest uploading is done\n")

-        bucket_ = target[1]
-        prefix__ = remove_prefix(target[2], "/")
+        logger.info(
+            "Start generating version-level package.json for package: %s in s3 bucket %s",
+            package_metadata.name, bucket_
+        )
+        failed_metas = []
+        _version_metadata_path = valid_paths[1]
+        _failed_metas = client.upload_metadatas(
+            meta_file_paths=[_version_metadata_path],
+            target=(bucket_, prefix__),
+            product=product,
+            root=target_dir
+        )
+        failed_metas.extend(_failed_metas)
+        logger.info("version-level package.json uploading done")

         logger.info(
             "Start generating package.json for package: %s in s3 bucket %s",
             package_metadata.name, bucket_
@@ -137,7 +150,6 @@ def handle_npm_uploading(
         )
         logger.info("package.json generation done\n")

-        failed_metas = []
         if META_FILE_GEN_KEY in meta_files:
             _failed_metas = client.upload_metadatas(
                 meta_file_paths=[meta_files[META_FILE_GEN_KEY]],
@@ -178,7 +190,7 @@ def handle_npm_del(
 def handle_npm_del(
         tarball_path: str,
         product: str,
-        targets: List[Tuple[str, str, str]] = None,
+        targets: List[Tuple[str, str, str, str]] = None,
         aws_profile=None,
         dir_=None,
         do_index=True,
@@ -381,11 +393,11 @@ def _gen_npm_package_metadata_for_del(
     return meta_files


-def _scan_metadata_paths_from_archive(path: str, prod="", dir__=None) -> Tuple[str, list,
-                                                                               NPMPackageMetadata]:
+def _scan_metadata_paths_from_archive(path: str, registry: str, prod="", dir__=None) ->\
+        Tuple[str, list, NPMPackageMetadata]:
     tmp_root = mkdtemp(prefix=f"npm-charon-{prod}-", dir=dir__)
     try:
-        _, valid_paths = extract_npm_tarball(path, tmp_root, True)
+        _, valid_paths = extract_npm_tarball(path, tmp_root, True, registry)
         if len(valid_paths) > 1:
             version = _scan_for_version(valid_paths[1])
             package = NPMPackageMetadata(version, True)
@@ -502,23 +514,14 @@ def _write_package_metadata_to_file(package_metadata: NPMPackageMetadata, root='
     final_package_metadata_path = os.path.join(root, package_metadata.name, PACKAGE_JSON)
     try:
         with open(final_package_metadata_path, mode='w', encoding='utf-8') as f:
-            dump(_del_none(package_metadata.__dict__.copy()), f)
+            dump(del_none(package_metadata.__dict__.copy()), f)
         return final_package_metadata_path
     except FileNotFoundError:
         logger.error(
             'Can not create file %s because of some missing folders', final_package_metadata_path
         )


-def _del_none(d):
-    for key, value in list(d.items()):
-        if value is None:
-            del d[key]
-        elif isinstance(value, dict):
-            _del_none(value)
-    return d
-
-
 def __get_path_tree(paths: str, prefix: str) -> Set[str]:
     valid_dirs = set()
     for f in paths:
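Note: re-extracting the tarball once per target is what makes the new loop necessary — the version-level package.json now embeds a registry-specific dist block, so one shared extraction no longer fits all buckets. A sketch of the two valid_paths entries, reusing the example package from the archive.py docstring (tmp directory name abbreviated):

    # valid_paths[0] -> <tmp_root>/jquery/-/jquery-7.6.1.tgz     uploaded via upload_files
    # valid_paths[1] -> <tmp_root>/jquery/7.6.1/package.json     uploaded via upload_metadatas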
21 changes: 12 additions & 9 deletions charon/storage.py
@@ -383,15 +383,18 @@ async def path_upload_handler(
                 # NOTE: This should not happen for most cases, as most
                 # of the metadata file does not have product info. Just
                 # leave for requirement change in future
-                (prods, no_error) = await self.__run_async(
-                    self.__get_prod_info,
-                    path_key, bucket_name
-                )
-                if not no_error:
-                    failed.append(full_file_path)
-                    return
-                if no_error and product not in prods:
-                    prods.append(product)
+                # This is now used for npm version-level package.json
+                prods = [product]
+                if existed:
+                    (prods, no_error) = await self.__run_async(
+                        self.__get_prod_info,
+                        path_key, bucket_name
+                    )
+                    if not no_error:
+                        failed.append(full_file_path)
+                        return
+                    if no_error and product not in prods:
+                        prods.append(product)
                 updated = await self.__update_prod_info(
                     path_key, bucket_name, prods
                 )
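Note: as far as the diff shows, the effect is that a brand-new key is stamped with only the current product, while the existing-products lookup (which previously ran unconditionally and failed the upload when unreadable) now runs only for keys that already exist:

    prods = [product]          # fresh key: only this product claims it
    if existed:                # existing key: merge with the recorded product list
        ...                    # (load it, bail out on read error, append if missing)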
33 changes: 28 additions & 5 deletions charon/utils/archive.py
@@ -20,10 +20,14 @@
 import requests
 import tempfile
 import shutil
+import subresource_integrity
 from enum import Enum
-from json import load, JSONDecodeError
+from json import load, JSONDecodeError, dump
 from typing import Tuple
 from zipfile import ZipFile, is_zipfile
+from charon.constants import DEFAULT_REGISTRY
+from charon.utils.files import digest, HashType
+from charon.utils.map import del_none

 logger = logging.getLogger(__name__)

@@ -42,7 +46,8 @@ def extract_zip_with_files(zf: ZipFile, target_dir: str, file_suffix: str, debug
     zf.extractall(target_dir, members=filtered)


-def extract_npm_tarball(path: str, target_dir: str, is_for_upload: bool) -> Tuple[str, list]:
+def extract_npm_tarball(path: str, target_dir: str, is_for_upload: bool, registry=DEFAULT_REGISTRY)\
+        -> Tuple[str, list]:
     """ Extract npm tarball will relocate the tgz file and metadata files.
         * Locate tar path ( e.g.: jquery/-/jquery-7.6.1.tgz or @types/jquery/-/jquery-2.2.3.tgz).
         * Locate version metadata path (e.g.: jquery/7.6.1 or @types/jquery/2.2.3).
@@ -54,7 +59,7 @@ def extract_npm_tarball(path: str, target_dir: str, is_for_upload: bool) -> Tupl
         tgz.extractall()
         for f in tgz:
             if f.name.endswith("package.json"):
-                parse_paths = __parse_npm_package_version_paths(f.path)
+                version_data, parse_paths = __parse_npm_package_version_paths(f.path)
                 package_name_path = parse_paths[0]
                 os.makedirs(os.path.join(target_dir, parse_paths[0]))
                 tarball_parent_path = os.path.join(target_dir, parse_paths[0], "-")
@@ -63,7 +68,11 @@ def extract_npm_tarball(path: str, target_dir: str, is_for_upload: bool) -> Tupl
                     target_dir, parse_paths[0], parse_paths[1]
                 )
                 valid_paths.append(os.path.join(version_metadata_parent_path, "package.json"))
+
                 if is_for_upload:
+                    tgz_relative_path = "/".join([parse_paths[0], "-", _get_tgz_name(path)])
+                    __write_npm_version_dist(path, f.path, version_data, tgz_relative_path, registry)
+
                     os.makedirs(tarball_parent_path)
                     target = os.path.join(tarball_parent_path, os.path.basename(path))
                     shutil.copyfile(path, target)
@@ -81,12 +90,26 @@ def _get_tgz_name(path: str):
return ""


def __parse_npm_package_version_paths(path: str) -> list:
def __write_npm_version_dist(path: str, version_meta_extract_path: str, version_data: dict,
tgz_relative_path: str, registry: str):
dist = dict()
dist["tarball"] = "".join(["https://", registry, "/", tgz_relative_path])
dist["shasum"] = digest(path, HashType.SHA1)
with open(path, "rb") as tarball:
tarball_data = tarball.read()
integrity = subresource_integrity.render(tarball_data, ['sha512'])
dist["integrity"] = integrity
version_data["dist"] = dist
with open(version_meta_extract_path, mode='w', encoding='utf-8') as f:
dump(del_none(version_data), f)


def __parse_npm_package_version_paths(path: str) -> Tuple[dict, list]:
try:
with open(path, encoding='utf-8') as version_package:
data = load(version_package)
package_version_paths = [data['name'], data['version']]
return package_version_paths
return data, package_version_paths
except JSONDecodeError:
logger.error('Error: Failed to parse json!')

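Note: a hedged sketch of what __write_npm_version_dist adds to the extracted version-level package.json, assuming the registry "npm1.registry.redhat.com" from tests/base.py and the docstring's jquery example (digest values are placeholders, not real hashes):

    version_data["dist"] = {
        "tarball": "https://npm1.registry.redhat.com/jquery/-/jquery-7.6.1.tgz",
        "shasum": "<40-hex sha1 of the .tgz>",         # digest(path, HashType.SHA1)
        "integrity": "sha512-<base64 sha512 digest>",  # subresource_integrity.render(data, ['sha512'])
    }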
7 changes: 7 additions & 0 deletions charon/utils/map.py
@@ -0,0 +1,7 @@
+def del_none(d):
+    for key, value in list(d.items()):
+        if value is None:
+            del d[key]
+        elif isinstance(value, dict):
+            del_none(value)
+    return d
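Note: del_none mutates its argument in place and recurses into nested dicts, but not into lists of dicts. A quick usage sketch with hypothetical metadata:

    meta = {"name": "jquery", "deprecated": None, "dist": {"unpackedSize": None, "shasum": "abc"}}
    del_none(meta)
    # -> {"name": "jquery", "dist": {"shasum": "abc"}}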
1 change: 1 addition & 0 deletions requirements.txt
@@ -7,3 +7,4 @@ click==8.0.3
 requests==2.27.1
 ruamel.yaml==0.17.20
 defusedxml==0.7.1
+subresource-integrity==0.2
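Note: this pins the library whose render helper (called in archive.py above) produces the SRI string stored in dist["integrity"]. A minimal usage sketch with a placeholder result:

    import subresource_integrity

    subresource_integrity.render(b"example bytes", ["sha512"])
    # -> "sha512-<base64-encoded sha512 digest>"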
2 changes: 1 addition & 1 deletion setup.py
@@ -20,7 +20,7 @@

 from setuptools import setup, find_packages

-version = "1.1.0"
+version = "1.1.1"

 # f = open('README.md')
 # long_description = f.read().strip()
4 changes: 4 additions & 0 deletions tests/base.py
@@ -50,6 +50,10 @@ def setUp(self):
         ea:
           bucket: "charon-test-ea"
           prefix: earlyaccess/all
+
+        npm:
+          bucket: "charon-test-npm"
+          registry: "npm1.registry.redhat.com"
         """
         self.prepare_config(config_base, default_config_content)

2 changes: 2 additions & 0 deletions tests/commons.py
@@ -101,6 +101,8 @@
"@babel/code-frame/-/code-frame-7.15.8.tgz",
]
CODE_FRAME_META = "@babel/code-frame/package.json"

CODE_FRAME_7_14_5_META = "@babel/code-frame/7.14.5/package.json"
# For npm indexes
CODE_FRAME_7_14_5_INDEXES = [
"@babel/code-frame/7.14.5/index.html",