From 0bf1840caa4c6b4f3a0e621c36d5a83ebf85463d Mon Sep 17 00:00:00 2001
From: filipecosta90
Date: Fri, 11 Feb 2022 00:01:38 +0000
Subject: [PATCH] In case of failure of DB spin up, retrieve the log and push
 to S3

---
 pyproject.toml                           |  2 +-
 redisbench_admin/run_remote/remote_db.py | 63 ++++++++++++-------
 .../run_remote/remote_failures.py        | 32 +++++-----
 3 files changed, 61 insertions(+), 36 deletions(-)

NOTE(review): this patch was recovered from a whitespace-mangled copy; the
hunk indentation below was reconstructed and should be verified against the
upstream files before applying. (git am/apply ignores this free-text block.)

diff --git a/pyproject.toml b/pyproject.toml
index 443b40a..8bc8c3d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "redisbench-admin"
-version = "0.6.18"
+version = "0.6.19"
 description = "Redis benchmark run helper. A wrapper around Redis and Redis Modules benchmark tools ( ftsb_redisearch, memtier_benchmark, redis-benchmark, aibench, etc... )."
 authors = ["filipecosta90 ","Redis Performance Group "]
 readme = "README.md"
diff --git a/redisbench_admin/run_remote/remote_db.py b/redisbench_admin/run_remote/remote_db.py
index eed171e..d2a6016 100644
--- a/redisbench_admin/run_remote/remote_db.py
+++ b/redisbench_admin/run_remote/remote_db.py
@@ -171,27 +171,48 @@ def remote_db_spin(
     )
 
     if setup_type == "oss-standalone":
-        full_logfile = spin_up_standalone_remote_redis(
-            temporary_dir,
-            server_public_ip,
-            username,
-            private_key,
-            remote_module_files,
-            logname,
-            redis_configuration_parameters,
-            db_ssh_port,
-            modules_configuration_parameters_map,
-        )
-        full_logfiles.append(full_logfile)
-        local_redis_conn, ssh_tunnel = ssh_tunnel_redisconn(
-            server_plaintext_port,
-            server_private_ip,
-            server_public_ip,
-            username,
-            db_ssh_port,
-            private_key,
-        )
-        redis_conns.append(local_redis_conn)
+
+        try:
+            full_logfile = spin_up_standalone_remote_redis(
+                temporary_dir,
+                server_public_ip,
+                username,
+                private_key,
+                remote_module_files,
+                logname,
+                redis_configuration_parameters,
+                db_ssh_port,
+                modules_configuration_parameters_map,
+            )
+            full_logfiles.append(full_logfile)
+            local_redis_conn, ssh_tunnel = ssh_tunnel_redisconn(
+                server_plaintext_port,
+                server_private_ip,
+                server_public_ip,
+                username,
+                db_ssh_port,
+                private_key,
+            )
+            redis_conns.append(local_redis_conn)
+        except redis.exceptions.ConnectionError as e:
+            logging.error("A error occurred while spinning DB: {}".format(e.__str__()))
+            remote_file = "{}/{}".format(temporary_dir, full_logfile)
+            logging.error(
+                "Trying to fetch DB remote log {} into {}".format(
+                    remote_file, full_logfile
+                )
+            )
+            failed_remote_run_artifact_store(
+                True,
+                client_public_ip,
+                dirname,
+                remote_file,
+                full_logfile,
+                s3_bucket_name,
+                s3_bucket_path,
+                username,
+                private_key,
+            )
 
     if cluster_enabled:
         setup_redis_cluster_from_conns(
diff --git a/redisbench_admin/run_remote/remote_failures.py b/redisbench_admin/run_remote/remote_failures.py
index 73dedda..26879d6 100644
--- a/redisbench_admin/run_remote/remote_failures.py
+++ b/redisbench_admin/run_remote/remote_failures.py
@@ -26,18 +26,22 @@ def failed_remote_run_artifact_store(
             remote_file, local_file_fullpath
         )
     )
-    fetch_file_from_remote_setup(
-        client_public_ip,
-        username,
-        private_key,
-        local_file_fullpath,
-        remote_file,
-    )
-    if upload_results_s3:
-        logging.info(
-            "Uploading file {} to s3. s3 bucket name: {}. s3 bucket path: {}".format(
-                local_file_fullpath, s3_bucket_name, s3_bucket_path
-            )
+    try:
+        fetch_file_from_remote_setup(
+            client_public_ip,
+            username,
+            private_key,
+            local_file_fullpath,
+            remote_file,
         )
-        artifacts = [local_file_fullpath]
-        upload_artifacts_to_s3(artifacts, s3_bucket_name, s3_bucket_path)
+    except FileNotFoundError as f:
+        logging.error("Unable to fetch remote file: {}".format(f.__str__()))
+    finally:
+        if upload_results_s3:
+            logging.info(
+                "Uploading file {} to s3. s3 bucket name: {}. s3 bucket path: {}".format(
+                    local_file_fullpath, s3_bucket_name, s3_bucket_path
+                )
+            )
+            artifacts = [local_file_fullpath]
+            upload_artifacts_to_s3(artifacts, s3_bucket_name, s3_bucket_path)