diff --git a/redisbench_admin/environments/oss_cluster.py b/redisbench_admin/environments/oss_cluster.py index 1c417b1..0c92ae9 100644 --- a/redisbench_admin/environments/oss_cluster.py +++ b/redisbench_admin/environments/oss_cluster.py @@ -28,7 +28,7 @@ def spin_up_local_redis_cluster( for master_shard_id in range(1, shard_count + 1): shard_port = master_shard_id + start_port - 1 - command = generate_cluster_redis_server_args( + command, _ = generate_cluster_redis_server_args( dbdir, local_module_file, ip, @@ -139,14 +139,18 @@ def generate_cluster_redis_server_args( configuration_parameters=None, daemonize="yes", modules_configuration_parameters_map={}, + logname_prefix=None, ): + if logname_prefix is None: + logname_prefix = "" + logfile = "{}cluster-node-port-{}.log".format(logname_prefix, port) # start redis-server command = [ "redis-server", "--appendonly", "no", "--logfile", - "cluster-node-port-{}.log".format(port), + logfile, "--cluster-enabled", "yes", "--daemonize", @@ -186,7 +190,7 @@ def generate_cluster_redis_server_args( redis_server_config_module_part( command, mod, modules_configuration_parameters_map ) - return command + return command, logfile def get_cluster_dbfilename(port): diff --git a/redisbench_admin/run/cluster.py b/redisbench_admin/run/cluster.py index 7486def..d802185 100644 --- a/redisbench_admin/run/cluster.py +++ b/redisbench_admin/run/cluster.py @@ -112,13 +112,16 @@ def spin_up_redis_cluster_remote_redis( start_port, ssh_port, modules_configuration_parameters_map, + logname, ): logging.info("Generating the remote redis-server command arguments") redis_process_commands = [] + logfiles = [] + logname_prefix = logname[: len(logname) - 4] + "-" for master_shard_id in range(1, shard_count + 1): shard_port = master_shard_id + start_port - 1 - command = generate_cluster_redis_server_args( + command, logfile = generate_cluster_redis_server_args( dbdir_folder, remote_module_files, server_private_ip, @@ -126,13 +129,16 @@ def 
spin_up_redis_cluster_remote_redis( redis_configuration_parameters, "yes", modules_configuration_parameters_map, + logname_prefix, ) logging.error( "Remote primary shard {} command: {}".format( master_shard_id, " ".join(command) ) ) + logfiles.append(logfile) redis_process_commands.append(" ".join(command)) execute_remote_commands( server_public_ip, username, private_key, redis_process_commands, ssh_port ) + return logfiles diff --git a/redisbench_admin/run_remote/remote_db.py b/redisbench_admin/run_remote/remote_db.py index dfb3cdf..eed171e 100644 --- a/redisbench_admin/run_remote/remote_db.py +++ b/redisbench_admin/run_remote/remote_db.py @@ -6,6 +6,8 @@ import datetime import logging +import redis + from redisbench_admin.environments.oss_cluster import setup_redis_cluster_from_conns from redisbench_admin.run.cluster import ( spin_up_redis_cluster_remote_redis, @@ -24,6 +26,7 @@ remote_dataset_folder, ) from redisbench_admin.run_remote.remote_client import run_remote_client_tool +from redisbench_admin.run_remote.remote_failures import failed_remote_run_artifact_store from redisbench_admin.run_remote.standalone import ( cp_local_dbdir_to_remote, remote_module_files_cp, @@ -84,6 +87,8 @@ def remote_db_spin( tf_github_sha, username, private_key, + s3_bucket_name, + s3_bucket_path, ): ( _, @@ -119,7 +124,7 @@ def remote_db_spin( redis_conns = [] topology_setup_start_time = datetime.datetime.now() if setup_type == "oss-cluster": - spin_up_redis_cluster_remote_redis( + logfiles = spin_up_redis_cluster_remote_redis( server_public_ip, server_private_ip, username, @@ -131,19 +136,39 @@ def remote_db_spin( cluster_start_port, db_ssh_port, modules_configuration_parameters_map, + logname, ) - - for p in range(cluster_start_port, cluster_start_port + shard_count): - local_redis_conn, ssh_tunnel = ssh_tunnel_redisconn( - p, - server_private_ip, - server_public_ip, + try: + for p in range(cluster_start_port, cluster_start_port + shard_count): + local_redis_conn, ssh_tunnel = 
ssh_tunnel_redisconn( + p, + server_private_ip, + server_public_ip, + username, + db_ssh_port, + private_key, + ) + local_redis_conn.ping() + redis_conns.append(local_redis_conn) + except redis.exceptions.ConnectionError as e: + logging.error("An error occurred while spinning DB: {}".format(e.__str__())) + remote_file = "{}/{}".format(temporary_dir, logfiles[0]) + logging.error( + "Trying to fetch DB remote log {} into {}".format( + remote_file, logfiles[0] + ) + ) + failed_remote_run_artifact_store( + True, + client_public_ip, + dirname, + remote_file, + logfiles[0], + s3_bucket_name, + s3_bucket_path, username, - db_ssh_port, private_key, ) - local_redis_conn.ping() - redis_conns.append(local_redis_conn) if setup_type == "oss-standalone": full_logfile = spin_up_standalone_remote_redis( diff --git a/redisbench_admin/run_remote/run_remote.py b/redisbench_admin/run_remote/run_remote.py index 70eea62..894e4f0 100644 --- a/redisbench_admin/run_remote/run_remote.py +++ b/redisbench_admin/run_remote/run_remote.py @@ -317,6 +317,8 @@ def run_remote_command_logic(args, project_name, project_version): tf_github_sha, username, private_key, + s3_bucket_name, + s3_bucket_path, ) if benchmark_type == "read-only": logging.info(