From c7cdb9e1bf6a1d0841cf5cd69139c05c33635908 Mon Sep 17 00:00:00 2001
From: Gang Li
Date: Mon, 1 Apr 2024 15:36:15 +0800
Subject: [PATCH] Some updates

* Add switch to disable file log handler
* Use non-wildcard paths for invalidation
* Fix a targets check in command upload and delete

---
 charon/cache.py          | 11 ++++++++---
 charon/cmd/cmd_cache.py  |  7 +++++--
 charon/cmd/cmd_delete.py |  6 ++++++
 charon/cmd/cmd_index.py  |  2 +-
 charon/cmd/cmd_upload.py |  7 +++++++
 charon/cmd/internal.py   | 17 +++++++++++++----
 charon/pkgs/pkg_utils.py | 17 +++++++++++++----
 charon/utils/logs.py     |  8 ++++++--
 8 files changed, 59 insertions(+), 16 deletions(-)

diff --git a/charon/cache.py b/charon/cache.py
index 1dc84e5c..74406e4a 100644
--- a/charon/cache.py
+++ b/charon/cache.py
@@ -8,6 +8,8 @@
 logger = logging.getLogger(__name__)
 
 ENDPOINT_ENV = "aws_endpoint_url"
+INVALIDATION_BATCH_DEFAULT = 3000
+INVALIDATION_BATCH_WILDCARD = 15
 
 DEFAULT_BUCKET_TO_DOMAIN = {
     "prod-maven-ga": "maven.repository.redhat.com",
@@ -61,7 +63,7 @@ def __get_endpoint(self, extra_conf) -> str:
     def invalidate_paths(
         self, distr_id: str,
         paths: List[str],
-        batch_size: int = 15
+        batch_size=INVALIDATION_BATCH_DEFAULT
     ) -> List[Dict[str, str]]:
         """Send a invalidating requests for the paths in distribution to CloudFront.
         This will invalidate the paths in the distribution to enforce the refreshment
@@ -71,8 +73,7 @@ def invalidate_paths(
           get_dist_id_by_domain(domain) function
         * Can specify the invalidating paths through paths param.
         * Batch size is the number of paths to be invalidated in one request.
-          Because paths contains wildcard(*), so the default value is 15 which
-          is the maximum number in official doc:
+          The default value is 3000 which is the maximum number in official doc:
           https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/Invalidation.html#InvalidationLimits
         """
         logger.debug("[CloudFront] Creating invalidation for paths: %s", paths)
@@ -83,6 +84,10 @@ def invalidate_paths(
         results = []
         for batch_paths in real_paths:
             caller_ref = str(uuid.uuid4())
+            logger.debug(
+                "Processing invalidation for batch with ref %s, size: %s",
+                caller_ref, len(batch_paths)
+            )
             try:
                 response = self.__client.create_invalidation(
                     DistributionId=distr_id,
diff --git a/charon/cmd/cmd_cache.py b/charon/cmd/cmd_cache.py
index e63bf222..95aae658 100644
--- a/charon/cmd/cmd_cache.py
+++ b/charon/cmd/cmd_cache.py
@@ -86,7 +86,7 @@ def cf_invalidate(
     """
     _decide_mode(
         f"cfclear-{target}", "",
-        is_quiet=quiet, is_debug=debug
+        is_quiet=quiet, is_debug=debug, use_log_file=False
     )
     if not paths and not path_file:
         logger.error(
@@ -172,10 +172,13 @@ def cf_check(
     """
     _decide_mode(
         f"cfcheck-{target}", "",
-        is_quiet=quiet, is_debug=debug
+        is_quiet=quiet, is_debug=debug, use_log_file=False
     )
     try:
         (buckets, aws_profile) = _init_cmd(target)
+        if not buckets:
+            sys.exit(1)
+
         for b in buckets:
             cf_client = CFClient(aws_profile=aws_profile)
             bucket_name = b[1]
diff --git a/charon/cmd/cmd_delete.py b/charon/cmd/cmd_delete.py
index dda57d2e..d4752f26 100644
--- a/charon/cmd/cmd_delete.py
+++ b/charon/cmd/cmd_delete.py
@@ -150,6 +150,12 @@ def delete(
     product_key = f"{product}-{version}"
     manifest_bucket_name = conf.get_manifest_bucket()
     buckets = _get_buckets(targets, conf)
+    if not buckets:
+        logger.error(
+            "The targets %s can not be found! Please check"
+            " your charon configuration to confirm the targets"
+            " are set correctly.", targets
+        )
     if npm_archive_type != NpmArchiveType.NOT_NPM:
         logger.info("This is a npm archive")
         tmp_dir, succeeded = handle_npm_del(
diff --git a/charon/cmd/cmd_index.py b/charon/cmd/cmd_index.py
index 418b05e6..e27c5033 100644
--- a/charon/cmd/cmd_index.py
+++ b/charon/cmd/cmd_index.py
@@ -70,7 +70,7 @@ def index(
     """
     _decide_mode(
         "index-{}".format(target), path.replace("/", "_"),
-        is_quiet=quiet, is_debug=debug
+        is_quiet=quiet, is_debug=debug, use_log_file=False
     )
     try:
         conf = get_config()
diff --git a/charon/cmd/cmd_upload.py b/charon/cmd/cmd_upload.py
index 2fe19901..55696c2e 100644
--- a/charon/cmd/cmd_upload.py
+++ b/charon/cmd/cmd_upload.py
@@ -169,6 +169,13 @@ def upload(
     product_key = f"{product}-{version}"
     manifest_bucket_name = conf.get_manifest_bucket()
     buckets = _get_buckets(targets, conf)
+    if not buckets:
+        logger.error(
+            "The targets %s can not be found! Please check"
+            " your charon configuration to confirm the targets"
+            " are set correctly.", targets
+        )
+        sys.exit(1)
     if npm_archive_type != NpmArchiveType.NOT_NPM:
         logger.info("This is a npm archive")
         tmp_dir, succeeded = handle_npm_uploading(
diff --git a/charon/cmd/internal.py b/charon/cmd/internal.py
index e901c8f4..11c92a0c 100644
--- a/charon/cmd/internal.py
+++ b/charon/cmd/internal.py
@@ -90,14 +90,23 @@ def _validate_prod_key(product: str, version: str) -> bool:
     return True
 
 
-def _decide_mode(product: str, version: str, is_quiet: bool, is_debug: bool):
+def _decide_mode(
+    product: str, version: str, is_quiet: bool,
+    is_debug: bool, use_log_file=True
+):
     if is_quiet:
         logger.info("Quiet mode enabled, "
                     "will only give warning and error logs.")
-        set_logging(product, version, level=logging.WARNING)
+        set_logging(
+            product, version, level=logging.WARNING, use_log_file=use_log_file
+        )
     elif is_debug:
         logger.info("Debug mode enabled, "
                     "will give all debug logs for tracing.")
-        set_logging(product, version, level=logging.DEBUG)
+        set_logging(
+            product, version, level=logging.DEBUG, use_log_file=use_log_file
+        )
     else:
-        set_logging(product, version, level=logging.INFO)
+        set_logging(
+            product, version, level=logging.INFO, use_log_file=use_log_file
+        )
diff --git a/charon/pkgs/pkg_utils.py b/charon/pkgs/pkg_utils.py
index ce0d20ff..88c31257 100644
--- a/charon/pkgs/pkg_utils.py
+++ b/charon/pkgs/pkg_utils.py
@@ -1,5 +1,9 @@
 from typing import List, Tuple
-from charon.cache import CFClient
+from charon.cache import (
+    CFClient,
+    INVALIDATION_BATCH_DEFAULT,
+    INVALIDATION_BATCH_WILDCARD
+)
 import logging
 import os
 
@@ -67,7 +71,7 @@ def invalidate_cf_paths(
     bucket: Tuple[str, str, str, str, str],
     invalidate_paths: List[str],
     root="/",
-    batch_size=15
+    batch_size=INVALIDATION_BATCH_DEFAULT
 ):
     logger.info("Invalidating CF cache for %s", bucket[1])
     bucket_name = bucket[1]
@@ -85,14 +89,19 @@ def invalidate_cf_paths(
         if prefix:
             path = os.path.join(prefix, path)
         final_paths.append(path)
-    logger.debug("Invalidating paths: %s", final_paths)
+    logger.debug("Invalidating paths: %s, size: %s", final_paths, len(final_paths))
     if not domain:
         domain = cf_client.get_domain_by_bucket(bucket_name)
     if domain:
         distr_id = cf_client.get_dist_id_by_domain(domain)
         if distr_id:
+            real_batch_size = batch_size
+            for path in final_paths:
+                if path.endswith('*'):
+                    real_batch_size = INVALIDATION_BATCH_WILDCARD
+                    break
             result = cf_client.invalidate_paths(
-                distr_id, final_paths, batch_size
+                distr_id, final_paths, real_batch_size
             )
             if result:
                 logger.info(
diff --git a/charon/utils/logs.py b/charon/utils/logs.py
index ed8469f9..9e273640 100644
--- a/charon/utils/logs.py
+++ b/charon/utils/logs.py
@@ -45,7 +45,10 @@ def __del__(self):
         pass
 
 
-def set_logging(product: str, version: str, name="charon", level=logging.DEBUG, handler=None):
+def set_logging(
+    product: str, version: str, name="charon",
+    level=logging.DEBUG, handler=None, use_log_file=True
+):
     # create logger
     logger = logging.getLogger(name)
     for hdlr in list(logger.handlers):  # make a copy so it doesn't change
@@ -69,7 +72,8 @@ def set_logging(product: str, version: str, name="charon", level=logging.DEBUG,
     # add ch to logger
     logger.addHandler(handler)
 
-    set_log_file_handler(product, version, logger)
+    if use_log_file:
+        set_log_file_handler(product, version, logger)
 
     logger = logging.getLogger('charon')
     for hdlr in list(logger.handlers):  # make a copy so it doesn't change
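
Note (not part of the commit): the new use_log_file switch only decides whether
set_log_file_handler() gets attached; console logging is untouched. A minimal
sketch of a caller opting out of the file handler, assuming the charon package
is installed (the "cfcheck-prod-maven" product key below is made up):

    import logging

    from charon.utils.logs import set_logging

    # Console-only logging, equivalent to what _decide_mode(..., use_log_file=False)
    # now does for the cf_check, cf_invalidate and index commands.
    set_logging("cfcheck-prod-maven", "", level=logging.INFO, use_log_file=False)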
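
Note (not part of the commit): the real_batch_size logic added to
invalidate_cf_paths() follows the CloudFront limits referenced in the cache.py
docstring: up to 3000 plain paths per invalidation request, but only 15 once a
wildcard path is involved. A standalone sketch of that selection plus the
batching it feeds into; pick_batch_size, chunk_paths and the sample paths are
illustrative names, not part of the patch:

    from typing import List

    INVALIDATION_BATCH_DEFAULT = 3000  # plain-path limit per invalidation request
    INVALIDATION_BATCH_WILDCARD = 15   # limit once any path uses a wildcard

    def pick_batch_size(paths: List[str]) -> int:
        # Drop to the wildcard limit as soon as one path ends with '*',
        # mirroring the check added to invalidate_cf_paths().
        for path in paths:
            if path.endswith("*"):
                return INVALIDATION_BATCH_WILDCARD
        return INVALIDATION_BATCH_DEFAULT

    def chunk_paths(paths: List[str], batch_size: int) -> List[List[str]]:
        # Split the full path list into CloudFront-sized invalidation batches.
        return [paths[i:i + batch_size] for i in range(0, len(paths), batch_size)]

    if __name__ == "__main__":
        sample = ["/ga/commons-lang3/maven-metadata.xml", "/ga/commons-lang3/*"]
        size = pick_batch_size(sample)  # 15, because of the trailing wildcard
        print(size, chunk_paths(sample, size))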