From c31eed43384029e62d0cb684883684718f6cab3f Mon Sep 17 00:00:00 2001 From: Devansh Jain <86314060+devanshjainms@users.noreply.github.com> Date: Tue, 24 Jun 2025 14:32:27 -0700 Subject: [PATCH 1/6] Extend support for SAPHanaSR-angi provider --- docs/HIGH_AVAILABILITY.md | 46 +- requirements.txt | 55 +-- scripts/sap_automation_qa.sh | 159 ++++++- scripts/setup.sh | 6 +- src/module_utils/commands.py | 13 +- src/module_utils/enums.py | 138 ++++++ src/module_utils/filter_tests.py | 134 ++++++ src/module_utils/get_cluster_status.py | 102 +++-- src/module_utils/sap_automation_qa.py | 67 +-- src/modules/check_indexserver.py | 35 +- src/modules/filesystem_freeze.py | 2 +- src/modules/get_azure_lb.py | 29 +- src/modules/get_cluster_status_db.py | 100 +++-- src/modules/get_cluster_status_scs.py | 17 +- src/modules/get_pcmk_properties_db.py | 80 ++-- src/modules/get_pcmk_properties_scs.py | 41 +- src/modules/location_constraints.py | 28 +- src/modules/log_parser.py | 30 +- src/modules/send_telemetry_data.py | 18 +- src/roles/ha_db_hana/tasks/block-network.yml | 8 +- .../ha_db_hana/tasks/files/constants.yaml | 82 +++- src/roles/ha_db_hana/tasks/fs-freeze.yml | 4 +- src/roles/ha_db_hana/tasks/ha-config.yml | 6 +- .../ha_db_hana/tasks/primary-crash-index.yml | 7 +- src/roles/ha_db_hana/tasks/primary-echo-b.yml | 6 +- .../ha_db_hana/tasks/primary-node-crash.yml | 4 +- .../ha_db_hana/tasks/primary-node-kill.yml | 6 +- .../ha_db_hana/tasks/resource-migration.yml | 26 +- src/roles/ha_db_hana/tasks/sbd-fencing.yml | 4 +- .../tasks/secondary-crash-index.yml | 5 +- .../ha_db_hana/tasks/secondary-echo-b.yml | 4 +- .../ha_db_hana/tasks/secondary-node-kill.yml | 6 +- src/roles/ha_scs/tasks/ascs-migration.yml | 1 - src/roles/ha_scs/tasks/ascs-node-crash.yml | 3 - src/roles/ha_scs/tasks/block-network.yml | 2 - src/roles/ha_scs/tasks/ha-config.yml | 1 - .../ha_scs/tasks/ha-failover-to-node.yml | 1 - .../ha_scs/tasks/kill-enqueue-replication.yml | 2 - .../ha_scs/tasks/kill-enqueue-server.yml 
| 3 - .../ha_scs/tasks/kill-message-server.yml | 3 - .../ha_scs/tasks/kill-sapstartsrv-process.yml | 1 - src/roles/ha_scs/tasks/manual-restart.yml | 2 - src/roles/misc/tasks/cluster-report.yml | 3 +- .../misc/tasks/get-saphanasr-provider.yml | 28 ++ src/roles/misc/tasks/post-validations.yml | 1 - src/roles/misc/tasks/pre-validations-db.yml | 11 +- src/roles/misc/tasks/pre-validations-scs.yml | 1 - src/roles/misc/tasks/rescue.yml | 1 - src/roles/misc/tasks/var-log-messages.yml | 1 - src/vars/input-api.yaml | 3 + tests/module_utils/filter_tests_test.py | 424 ++++++++++++++++++ tests/module_utils/get_cluster_status_test.py | 179 +++++++- tests/module_utils/sap_automation_qa_test.py | 4 +- tests/modules/check_indexserver_test.py | 46 +- tests/modules/get_cluster_status_db_test.py | 185 ++++++-- tests/modules/get_cluster_status_scs_test.py | 3 + tests/modules/get_pcmk_properties_db_test.py | 267 ++++++++++- tests/modules/get_pcmk_properties_scs_test.py | 199 +++++++- tests/modules/location_constraints_test.py | 19 +- tests/modules/log_parser_test.py | 141 +++++- tests/roles/ha_db_hana/block_network_test.py | 5 +- tests/roles/ha_db_hana/ha_config_test.py | 1 + .../roles/ha_db_hana/primary_node_ops_test.py | 1 + .../ha_db_hana/resource_migration_test.py | 5 + .../ha_db_hana/secondary_node_ops_test.py | 1 + .../mock_data/SAPHanaSR-manageProvider.txt | 5 + tests/roles/mock_data/check_indexserver.txt | 1 - .../roles/mock_data/get_cluster_status_db.txt | 2 +- .../mock_data/get_cluster_status_scs.txt | 1 - .../mock_data/get_pcmk_properties_db.txt | 2 +- .../mock_data/get_pcmk_properties_scs.txt | 1 - .../roles/mock_data/location_constraints.txt | 2 - tests/roles/mock_data/log_parser.txt | 1 - .../secondary_get_cluster_status_db.txt | 2 +- tests/roles/roles_testing_base.py | 1 + 75 files changed, 2354 insertions(+), 480 deletions(-) create mode 100644 src/module_utils/enums.py create mode 100644 src/module_utils/filter_tests.py create mode 100644 
src/roles/misc/tasks/get-saphanasr-provider.yml create mode 100644 tests/module_utils/filter_tests_test.py create mode 100644 tests/roles/mock_data/SAPHanaSR-manageProvider.txt diff --git a/docs/HIGH_AVAILABILITY.md b/docs/HIGH_AVAILABILITY.md index 071036c4..d0f25736 100644 --- a/docs/HIGH_AVAILABILITY.md +++ b/docs/HIGH_AVAILABILITY.md @@ -21,8 +21,10 @@ Currently SAP Testing Automation Framework is supported for below Linux distros |-----------|------|--------------|---------| | SAP Central Services | ENSA1 or ENSA2 | Azure Fencing Agent | Azure Files or ANF | | SAP Central Services | ENSA1 or ENSA2 | ISCSI (SBD device) | Azure Files or ANF | +| SAP Central Services | ENSA1 or ENSA2 | Azure Shared Disks (SBD device) | Azure Files or ANF | | SAP HANA | Scale-up | Azure Fencing Agent | Azure Managed Disk or ANF | | SAP HANA | Scale-up | ISCSI (SBD device) | Azure Managed Disk or ANF | +| SAP HANA | Scale-up | Azure Shared Disks (SBD device) | Azure Managed Disk or ANF | For SAP Central Services on SLES, both the simple mount approach and the classic method are supported. @@ -238,9 +240,10 @@ db_high_availability: true # The high availability configuration of the SCS and DB instance. Supported values are: # - AFA (for Azure Fencing Agent) -# - ISCSI (for SBD devices) -scs_cluster_type: "AFA" # or "ISCSI" -database_cluster_type: "AFA" # or "ISCSI" +# - ISCSI (for SBD devices with ISCSI target servers) +# - ASD (for SBD devices with Azure Shared Disks) +scs_cluster_type: "AFA" # or "ISCSI" or "ASD" +database_cluster_type: "AFA" # or "ISCSI" or "ASD" # The instance number of the SCS, ERS and DB instance. scs_instance_number: "00" @@ -273,23 +276,54 @@ key_vault_id: /subscriptions//resourceGroups/< secret_id: https://.vault.azure.net/secrets// ``` -2.2.3. Credential Files +2.2.3. **Credential Files** (Available locally) The required credential files depend on the authentication method used to connect to the SAP system: -1. 
SSH Key Authentication: If connecting via SSH key, place the private key inside `WORKSPACE/SYSTEM/` and name the file "ssh_key.ppk". -1. Username and Password Authentication: If connecting using a username and password, create a password file by running the following command. It takes the username from hosts.yaml file. +1. **SSH Key Authentication**: If connecting via SSH key, place the private key inside `WORKSPACES/SYSTEM/` and name the file "ssh_key.ppk". +1. **Password Authentication**: If connecting using a username and password, create a password file by running the following command. It takes the username from hosts.yaml file. ```bash echo "password" > WORKSPACES/SYSTEM//password ``` +2.2.4. **Credential Files** (From Azure Key Vault) + +When using Azure Key Vault to store credentials, the framework retrieves authentication details directly from the key vault using the configured managed identity. + + **Authentication Methods:** + + 1. **SSH Key Authentication**: Store the private SSH key content in Azure Key Vault as a secret. + 2. **Password Authentication**: Store the password in Azure Key Vault as a secret. The username is taken from the `hosts.yaml` file. + + **Setup:** + + 1. Ensure the managed identity has "Key Vault Secrets User" role on the key vault. + + 2. Configure `key_vault_id` and `secret_id` parameters in `sap-parameters.yaml` as shown in section 2.2.2. + + **Important**: When using Key Vault authentication, do NOT create local credential files (`ssh_key.ppk` or `password` files). + + ### 3. 
Test Execution To execute the script, run following command: ```bash +# Run all the tests with default parameters ./scripts/sap_automation_qa.sh + +# Run specific test cases from HA_DB_HANA group +./scripts/sap_automation_qa.sh --test_groups=HA_DB_HANA --test_cases=[ha-config,primary-node-crash] + +# Run all enabled tests in HA_DB_HANA group +./scripts/sap_automation_qa.sh --test_groups=HA_DB_HANA + +# Run all enabled tests in HA_SCS group +./scripts/sap_automation_qa.sh --test_groups=HA_SCS + +# Run with verbose output +./scripts/sap_automation_qa.sh --test_groups=HA_DB_HANA --test_cases=[ha-config] -vv ``` ### 4. Viewing Test Results diff --git a/requirements.txt b/requirements.txt index 2e8902b2..8220ba41 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,18 +4,18 @@ # # pip-compile requirements.in # -ansible-compat==25.1.5 +ansible-compat==25.5.0 # via ansible-lint -ansible-core==2.17.11 +ansible-core==2.17.12 # via # -r requirements.in # ansible-compat # ansible-lint -ansible-lint==25.4.0 +ansible-lint==25.5.0 # via -r requirements.in ansible-runner==2.4.1 # via -r requirements.in -astroid==3.3.9 +astroid==3.3.10 # via pylint attrs==25.3.0 # via @@ -30,7 +30,7 @@ azure-core==1.34.0 # azure-mgmt-core # azure-storage-blob # azure-storage-queue -azure-identity==1.21.0 +azure-identity==1.23.0 # via # -r requirements.in # azure-kusto-data @@ -42,7 +42,7 @@ azure-kusto-ingest==5.0.3 # via -r requirements.in azure-mgmt-core==1.5.0 # via azure-mgmt-network -azure-mgmt-network==28.1.0 +azure-mgmt-network==29.0.0 # via -r requirements.in azure-storage-blob==12.23.0 # via @@ -64,15 +64,15 @@ cffi==1.17.1 # via cryptography charset-normalizer==3.4.2 # via requests -click==8.1.8 +click==8.2.1 # via # -r requirements.in # black -coverage[toml]==7.8.0 +coverage[toml]==7.9.0 # via # -r requirements.in # pytest-cov -cryptography==44.0.3 +cryptography==45.0.4 # via # ansible-core # azure-identity @@ -82,13 +82,13 @@ cryptography==44.0.3 # pyjwt dill==0.4.0 # via pylint 
-exceptiongroup==1.2.2 +exceptiongroup==1.3.0 # via pytest filelock==3.18.0 # via ansible-lint idna==3.10 # via requests -ijson==3.3.0 +ijson==3.4.0 # via azure-kusto-data importlib-metadata==8.7.0 # via ansible-lint @@ -107,7 +107,7 @@ jinja2==3.1.6 # ansible-core jmespath==1.0.1 # via -r requirements.in -jsonschema==4.23.0 +jsonschema==4.24.0 # via # ansible-compat # ansible-lint @@ -132,7 +132,7 @@ msal-extensions==1.3.1 # via azure-identity mypy-extensions==1.1.0 # via black -numpy==2.2.5 +numpy==2.2.6 # via # -r requirements.in # pandas @@ -144,7 +144,7 @@ packaging==25.0 # ansible-runner # black # pytest -pandas==2.2.3 +pandas==2.3.0 # via -r requirements.in pathspec==0.12.1 # via @@ -153,32 +153,34 @@ pathspec==0.12.1 # yamllint pexpect==4.9.0 # via ansible-runner -platformdirs==4.3.7 +platformdirs==4.3.8 # via # black # pylint -pluggy==1.5.0 +pluggy==1.6.0 # via pytest ptyprocess==0.7.0 # via pexpect pycparser==2.22 # via cffi pygments==2.19.1 - # via rich + # via + # pytest + # rich pyjwt[crypto]==2.10.1 # via # msal # pyjwt pylint==3.3.7 # via -r requirements.in -pytest==8.3.5 +pytest==8.4.0 # via # -r requirements.in # pytest-cov # pytest-mock -pytest-cov==6.1.1 +pytest-cov==6.2.0 # via -r requirements.in -pytest-mock==3.14.0 +pytest-mock==3.14.1 # via -r requirements.in python-daemon==3.1.2 # via ansible-runner @@ -201,7 +203,7 @@ referencing==0.36.2 # ansible-lint # jsonschema # jsonschema-specifications -requests==2.32.3 +requests==2.32.4 # via # -r requirements.in # azure-core @@ -211,11 +213,11 @@ resolvelib==1.0.1 # via ansible-core rich==14.0.0 # via -r requirements.in -rpds-py==0.24.0 +rpds-py==0.25.1 # via # jsonschema # referencing -ruamel-yaml==0.18.10 +ruamel-yaml==0.18.14 # via ansible-lint ruamel-yaml-clib==0.2.12 # via ruamel-yaml @@ -235,9 +237,9 @@ tomli==2.2.1 # coverage # pylint # pytest -tomlkit==0.13.2 +tomlkit==0.13.3 # via pylint -typing-extensions==4.13.2 +typing-extensions==4.14.0 # via # astroid # azure-core @@ -246,6 +248,7 @@ 
typing-extensions==4.13.2 # azure-storage-blob # azure-storage-queue # black + # exceptiongroup # referencing # rich tzdata==2025.2 @@ -256,5 +259,5 @@ wcmatch==10.0 # via ansible-lint yamllint==1.37.1 # via ansible-lint -zipp==3.21.0 +zipp==3.23.0 # via importlib-metadata diff --git a/scripts/sap_automation_qa.sh b/scripts/sap_automation_qa.sh index 2119e08e..a2b29da3 100755 --- a/scripts/sap_automation_qa.sh +++ b/scripts/sap_automation_qa.sh @@ -21,6 +21,57 @@ NC='\033[0m' # Global variable to store the path of the temporary file. temp_file="" +# Parse command line arguments and extract verbose flags +# Sets global ANSIBLE_VERBOSE variable +parse_arguments() { + ANSIBLE_VERBOSE="" + + for arg in "$@"; do + case "$arg" in + -v|-vv|-vvv|-vvvv|-vvvvv|-vvvvvv) + ANSIBLE_VERBOSE="$arg" + ;; + --test_groups=*) + TEST_GROUPS="${arg#*=}" + ;; + --test_cases=*) + TEST_CASES="${arg#*=}" + # Remove brackets and convert to array + TEST_CASES="${TEST_CASES#[}" + TEST_CASES="${TEST_CASES%]}" + ;; + --extra-vars=*) + EXTRA_VARS="${arg#*=}" + ;; + -h|--help) + show_usage + exit 0 + ;; + esac + done +} + +show_usage() { + cat << EOF +Usage: $0 [OPTIONS] + +Options: + -v, -vv, -vvv, etc. Set Ansible verbosity level + --test_groups=GROUP Specify test group to run (e.g., HA_DB_HANA, HA_SCS) + --test_cases=[case1,case2] Specify specific test cases to run (comma-separated, in brackets) + --extra-vars=VAR Specify additional Ansible extra variables (e.g., --extra-vars='{"key":"value"}') + -h, --help Show this help message + +Examples: + $0 --test_groups=HA_DB_HANA --test_cases=[ha-config,primary-node-crash] + $0 --test_groups=HA_SCS + $0 --test_groups=HA_DB_HANA --test_cases=[ha-config,primary-node-crash] -vv + $0 --test_groups=HA_DB_HANA --test_cases=[ha-config,primary-node-crash] --extra-vars='{"key":"value"}' + +Configuration is read from vars.yaml file. +EOF +} + # Print logs with color based on severity. # :param severity: The severity level of the log (e.g., "INFO", "ERROR"). 
# :param message: The message to log. @@ -127,6 +178,40 @@ get_playbook_name() { esac } +# Generate filtered test configuration as JSON for Ansible extra vars +# :return: JSON string with filtered test configuration +get_filtered_test_config() { + local input_api_file="${cmd_dir}/../src/vars/input-api.yaml" + local test_filter_script="${cmd_dir}/../src/module_utils/filter_tests.py" + + if [[ ! -f "$test_filter_script" ]]; then + log "ERROR" "Test filter script not found: $test_filter_script" >&2 + exit 1 + fi + + local group_arg="null" + local cases_arg="null" + + if [[ -n "$TEST_GROUPS" ]]; then + group_arg="$TEST_GROUPS" + fi + + if [[ -n "$TEST_CASES" ]]; then + cases_arg="$TEST_CASES" + fi + + local filtered_config + filtered_config=$(python3 "$test_filter_script" "$input_api_file" "$group_arg" "$cases_arg" 2>&1) + local exit_code=$? + + if [[ $exit_code -ne 0 ]]; then + log "ERROR" "Failed to filter test configuration: $filtered_config" >&2 + exit 1 + fi + + echo "$filtered_config" +} + # Retrieve a secret from Azure Key Vault. # :param key_vault_id: The ID of the Key Vault. # :param secret_id: The ID of the secret in the Key Vault. 
@@ -184,7 +269,7 @@ retrieve_secret_from_key_vault() { if [[ -f "$temp_file" ]]; then log "ERROR" "Temporary file already exists: $temp_file" exit 1 - fi + fi # Create the temporary file and write the secret value to it echo "$secret_value" > "$temp_file" @@ -210,6 +295,22 @@ run_ansible_playbook() { local auth_type=$4 local system_config_folder=$5 + + local extra_vars="" + if [[ -n "$TEST_GROUPS" || -n "$TEST_CASES" ]]; then + local filtered_config + filtered_config=$(get_filtered_test_config) + if [[ -n "$filtered_config" ]]; then + extra_vars="--extra-vars '$filtered_config'" + fi + fi + + if [[ -n "$EXTRA_VARS" ]]; then + log "INFO" "Using additional extra vars: $EXTRA_VARS" + escaped_extra_vars="${EXTRA_VARS//\'/\'\"\'\"\'}" + extra_vars+=" --extra-vars '$escaped_extra_vars'" + fi + # Set local secret_id and key_vault_id if defined local secret_id=$(grep "^secret_id:" "$system_params" | awk '{split($0,a,": "); print a[2]}' | xargs || true) local key_vault_id=$(grep "^key_vault_id:" "$system_params" | awk '{split($0,a,": "); print a[2]}' | xargs || true) @@ -232,13 +333,39 @@ run_ansible_playbook() { check_file_exists "$temp_file" \ "Temporary SSH key file not found. Please check the Key Vault secret ID." command="ansible-playbook ${cmd_dir}/../src/$playbook_name.yml -i $system_hosts --private-key $temp_file \ - -e @$VARS_FILE -e @$system_params -e '_workspace_directory=$system_config_folder'" + -e @$VARS_FILE -e @$system_params -e '_workspace_directory=$system_config_folder' $extra_vars" else - check_file_exists "${cmd_dir}/../WORKSPACES/SYSTEM/$SYSTEM_CONFIG_NAME/ssh_key.ppk" \ - "ssh_key.ppk not found in WORKSPACES/SYSTEM/$SYSTEM_CONFIG_NAME directory." 
- ssh_key="${cmd_dir}/../WORKSPACES/SYSTEM/$SYSTEM_CONFIG_NAME/ssh_key.ppk" + local ssh_key_dir="${cmd_dir}/../WORKSPACES/SYSTEM/$SYSTEM_CONFIG_NAME" + local ssh_key="" + local extensions=("ppk" "pem" "key" "private" "rsa" "ed25519" "ecdsa" "dsa" "") + + for ext in "${extensions[@]}"; do + if [[ -n "$ext" ]]; then + local key_file="${ssh_key_dir}/ssh_key.${ext}" + else + local key_file="${ssh_key_dir}/ssh_key" + fi + + if [[ -f "$key_file" ]]; then + ssh_key="$key_file" + log "INFO" "Found SSH key file: $ssh_key" + break + fi + done + + if [[ -z "$ssh_key" ]]; then + ssh_key=$(find "$ssh_key_dir" -name "*ssh_key*" -type f | head -n 1) + if [[ -n "$ssh_key" ]]; then + log "INFO" "Found SSH key file with pattern: $ssh_key" + fi + fi + + check_file_exists "$ssh_key" \ + "SSH key file not found in WORKSPACES/SYSTEM/$SYSTEM_CONFIG_NAME directory. Looked for files with patterns: ssh_key.*, *ssh_key*" + + chmod 600 "$ssh_key" command="ansible-playbook ${cmd_dir}/../src/$playbook_name.yml -i $system_hosts --private-key $ssh_key \ - -e @$VARS_FILE -e @$system_params -e '_workspace_directory=$system_config_folder'" + -e @$VARS_FILE -e @$system_params -e '_workspace_directory=$system_config_folder' $extra_vars" fi elif [[ "$auth_type" == "VMPASSWORD" ]]; then @@ -267,8 +394,12 @@ run_ansible_playbook() { exit 1 fi - log "INFO" "Running ansible playbook..." - log "INFO" "Executing: $command" + # Add verbosity if specified + if [[ -n "$ANSIBLE_VERBOSE" ]]; then + command+=" $ANSIBLE_VERBOSE" + fi + + log "INFO" "Running ansible playbook... Command: $command" eval $command return_code=$? log "INFO" "Ansible playbook execution completed with return code: $return_code" @@ -288,6 +419,16 @@ main() { log "INFO" "Activate the virtual environment..." 
set -e + # Parse command line arguments + parse_arguments "$@" + + if [[ -n "$TEST_GROUPS" ]]; then + log "INFO" "Test group specified: $TEST_GROUPS" + fi + if [[ -n "$TEST_CASES" ]]; then + log "INFO" "Test cases specified: $TEST_CASES" + fi + # Validate parameters validate_params @@ -314,4 +455,4 @@ main() { } # Execute the main function -main \ No newline at end of file +main "$@" diff --git a/scripts/setup.sh b/scripts/setup.sh index d113356f..d9a26118 100755 --- a/scripts/setup.sh +++ b/scripts/setup.sh @@ -52,9 +52,9 @@ packages=("python3-pip" "ansible" "sshpass" "python3-venv") install_packages "${packages[@]}" -if [ ! -d "../.venv" ]; then +if [ ! -d ".venv" ]; then log "INFO" "Creating Python virtual environment..." - if python3 -m venv ../.venv; then + if python3 -m venv .venv; then log "INFO" "Python virtual environment created." else log "ERROR" "Failed to create Python virtual environment." @@ -64,7 +64,7 @@ fi # Ensure virtual environment is activated log "INFO" "Activating Python virtual environment..." -if source ../.venv/bin/activate; then +if source .venv/bin/activate; then log "INFO" "Python virtual environment activated." else log "ERROR" "Failed to activate Python virtual environment." 
diff --git a/src/module_utils/commands.py b/src/module_utils/commands.py index 3ff96d81..c3aa5fb9 100644 --- a/src/module_utils/commands.py +++ b/src/module_utils/commands.py @@ -9,6 +9,11 @@ """ from __future__ import absolute_import, division, print_function +try: + from ansible.module_utils.enums import OperatingSystemFamily +except ImportError: + from src.module_utils.enums import OperatingSystemFamily + __metaclass__ = type DOCUMENTATION = r""" @@ -21,8 +26,8 @@ """ STONITH_ACTION = { - "REDHAT": ["pcs", "property", "config", "stonith-action"], - "SUSE": ["crm", "configure", "get_property", "stonith-action"], + OperatingSystemFamily.REDHAT: ["pcs", "property", "config", "stonith-action"], + OperatingSystemFamily.SUSE: ["crm", "configure", "get_property", "stonith-action"], } AUTOMATED_REGISTER = [ @@ -48,8 +53,8 @@ CONSTRAINTS = ["cibadmin", "--query", "--scope", "constraints"] RSC_CLEAR = { - "SUSE": lambda rsc: ["crm", "resource", "clear", rsc], - "REDHAT": lambda rsc: ["pcs", "resource", "clear", rsc], + OperatingSystemFamily.SUSE: lambda rsc: ["crm", "resource", "clear", rsc], + OperatingSystemFamily.REDHAT: lambda rsc: ["pcs", "resource", "clear", rsc], } CIB_ADMIN = lambda scope: ["cibadmin", "--query", "--scope", scope] diff --git a/src/module_utils/enums.py b/src/module_utils/enums.py new file mode 100644 index 00000000..5b8f476f --- /dev/null +++ b/src/module_utils/enums.py @@ -0,0 +1,138 @@ +""" +This module defines various enumerations and data classes used throughout the sap-automation-qa +""" + +from enum import Enum +from typing import Dict, Any, List + + +class TelemetryDataDestination(Enum): + """ + Enum for the destination of the telemetry data. + """ + + KUSTO = "azuredataexplorer" + LOG_ANALYTICS = "azureloganalytics" + + +class TestStatus(Enum): + """ + Enum for the status of the test case/step. 
+ """ + + SUCCESS = "PASSED" + ERROR = "FAILED" + WARNING = "WARNING" + INFO = "INFO" + NOT_STARTED = "NOT_STARTED" + + +class OperatingSystemFamily(Enum): + """ + Enum for the operating system family. + """ + + REDHAT = "REDHAT" + SUSE = "SUSE" + DEBIAN = "DEBIAN" + WINDOWS = "WINDOWS" + + +class HanaSRProvider(Enum): + """ + Enum for the SAP HANA SR provider type. + """ + + SAPHANASR = "SAPHanaSR" + ANGI = "SAPHanaSR-angi" + + +class Parameters: + """ + This class stores the parameters for the test case. + + :param category: The category of the parameter + :type category: str + :param id: Unique identifier for the parameter + :type id: str + :param name: Name of the parameter + :type name: str + :param value: Current value of the parameter + :type value: Any + :param expected_value: Expected value for validation + :type expected_value: Any + :param status: Current status of the parameter validation + :type status: str + """ + + def __init__( + self, category: str, id: str, name: str, value: Any, expected_value: Any, status: str + ): + self.category = category + self.id = id + self.name = name + self.value = value + self.expected_value = expected_value + self.status = status + + def to_dict(self) -> Dict[str, Any]: + """ + Converts the parameters to a dictionary. + + return: Dictionary containing the parameters + rtype: Dict[str, Any] + """ + return { + "category": self.category, + "id": self.id, + "name": self.name, + "value": self.value, + "expected_value": self.expected_value, + "status": self.status, + } + + +class Result: + """ + This class stores the result of the test case. 
+ + :param status: Current status of the test + :type status: str + :param message: Descriptive message about the result + :type message: str + :param details: List of detailed information + :type details: List[Any] + :param logs: List of log messages + :type logs: List[str] + :param changed: Whether the test caused any changes + :type changed: bool + """ + + def __init__( + self, + status: str = "", + message: str = "", + details: List[Any] = None, + logs: List[str] = None, + changed: bool = False, + ): + self.status = status if status else TestStatus.NOT_STARTED.value + self.message = message + self.details = details if details is not None else [] + self.logs = logs if logs is not None else [] + self.changed = changed + + def to_dict(self) -> Dict[str, Any]: + """ + Converts the result to a dictionary. + + :return: Dictionary containing the result + :rtype: Dict[str, Any] + """ + return { + "status": self.status, + "message": self.message, + "details": self.details.copy(), + "logs": self.logs.copy(), + "changed": self.changed, + } diff --git a/src/module_utils/filter_tests.py b/src/module_utils/filter_tests.py new file mode 100644 index 00000000..24f323e4 --- /dev/null +++ b/src/module_utils/filter_tests.py @@ -0,0 +1,134 @@ +#!/usr/bin/env python3 + +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. + +""" +Test Filter Module + +This module provides functionality to filter test groups and test cases +from the input-api.yaml configuration based on command line arguments. +""" + +import sys +import json +from typing import Dict, List, Optional, Any +import yaml + + +class TestFilter: + """Filter test configuration based on specified groups and cases.""" + + def __init__(self, input_file: str): + """ + Initialize the TestFilter with the input YAML file. 
+ + :param input_file: Path to the input YAML file + :type input_file: str + """ + self.input_file = input_file + self.config = self._load_config() + + def _load_config(self) -> Dict[str, Any]: + """ + Load the configuration from the input YAML file. + + :return: Loaded configuration + :rtype: Dict[str, Any] + """ + try: + with open(self.input_file, "r", encoding="utf-8") as f: + return yaml.safe_load(f) + except FileNotFoundError: + print(f"Error: Configuration file {self.input_file} not found", file=sys.stderr) + sys.exit(1) + except yaml.YAMLError as e: + print(f"Error parsing YAML file {self.input_file}: {e}", file=sys.stderr) + sys.exit(1) + + def filter_tests( + self, test_group: Optional[str] = None, test_cases: Optional[List[str]] = None + ) -> str: + """ + Filter the test configuration based on the specified test group and test cases. + + :param test_group: Name of the test group to filter, defaults to None + :type test_group: Optional[str], optional + :param test_cases: List of test case task names to include, defaults to None + :type test_cases: Optional[List[str]], optional + :return: JSON string representation of the filtered test configuration + :rtype: str + """ + filtered_config = self.config.copy() + + if test_group or test_cases: + for group in filtered_config["test_groups"]: + if test_group and group["name"] == test_group: + if test_cases: + filtered_cases = [] + for case in group["test_cases"]: + if case["task_name"] in test_cases: + case["enabled"] = True + filtered_cases.append(case) + group["test_cases"] = filtered_cases + elif test_group and group["name"] != test_group: + for case in group["test_cases"]: + case["enabled"] = False + elif test_cases and not test_group: + for case in group["test_cases"]: + if case["task_name"] in test_cases: + case["enabled"] = True + else: + case["enabled"] = False + + return json.dumps(filtered_config, indent=2) + + def get_ansible_vars( + self, test_group: Optional[str] = None, test_cases: 
Optional[List[str]] = None + ) -> str: + """ + Get Ansible variables from the filtered test configuration. + + :param test_group: Name of the test group to filter, defaults to None + :type test_group: Optional[str], optional + :param test_cases: List of test case task names to include, defaults to None + :type test_cases: Optional[List[str]], optional + :return: JSON string representation of the Ansible variables + :rtype: str + """ + filtered_json = self.filter_tests(test_group, test_cases) + filtered_config = json.loads(filtered_json) + return json.dumps({"test_groups": filtered_config["test_groups"]}) + + +def main(): + """ + Command line interface for the test filter. + """ + if len(sys.argv) < 2: + print( + "Usage: python filter_tests.py [test_group] [test_cases...]", + file=sys.stderr, + ) + print( + "Example: " + + "python filter_tests.py input-api.yaml HA_DB_HANA ha-config,primary-node-crash", + file=sys.stderr, + ) + sys.exit(1) + + input_file = sys.argv[1] + test_group = sys.argv[2] if len(sys.argv) > 2 and sys.argv[2] != "null" else None + test_cases_str = sys.argv[3] if len(sys.argv) > 3 and sys.argv[3] != "null" else None + + test_cases = None + if test_cases_str: + test_cases = [case.strip() for case in test_cases_str.split(",")] + + filter_obj = TestFilter(input_file) + result = filter_obj.get_ansible_vars(test_group, test_cases) + print(result) + + +if __name__ == "__main__": + main() diff --git a/src/module_utils/get_cluster_status.py b/src/module_utils/get_cluster_status.py index 42e09cc2..518bf42e 100644 --- a/src/module_utils/get_cluster_status.py +++ b/src/module_utils/get_cluster_status.py @@ -6,19 +6,22 @@ """ import logging +from abc import abstractmethod import xml.etree.ElementTree as ET from datetime import datetime from typing import Dict, Any try: - from ansible.module_utils.sap_automation_qa import SapAutomationQA, TestStatus + from ansible.module_utils.sap_automation_qa import SapAutomationQA + from ansible.module_utils.enums import 
TestStatus, OperatingSystemFamily from ansible.module_utils.commands import ( STONITH_ACTION, PACEMAKER_STATUS, CLUSTER_STATUS, ) except ImportError: - from src.module_utils.sap_automation_qa import SapAutomationQA, TestStatus + from src.module_utils.sap_automation_qa import SapAutomationQA + from src.module_utils.enums import TestStatus, OperatingSystemFamily from src.module_utils.commands import ( STONITH_ACTION, PACEMAKER_STATUS, @@ -31,7 +34,7 @@ class BaseClusterStatusChecker(SapAutomationQA): Base class to check the status of a pacemaker cluster. """ - def __init__(self, ansible_os_family: str = ""): + def __init__(self, ansible_os_family: OperatingSystemFamily): super().__init__() self.ansible_os_family = ansible_os_family self.result.update( @@ -44,6 +47,43 @@ def __init__(self, ansible_os_family: str = ""): } ) + + @abstractmethod + def _process_node_attributes(self, cluster_status_xml: ET.Element) -> Dict[str, Any]: + """ + Abstract method to process node attributes. + + :param cluster_status_xml: XML element containing node attributes. + :type cluster_status_xml: ET.Element + :raises NotImplementedError: If the method is not implemented in a child class. + :return: Dictionary with node attributes. + :rtype: Dict[str, Any] + """ + raise NotImplementedError("Child classes must implement this method") + + @abstractmethod + def _is_cluster_ready(self) -> bool: + """ + Abstract method to check if the cluster is ready. + To be implemented by child classes. + + :raises NotImplementedError: If the method is not implemented in a child class. + :return: True if the cluster is ready, False otherwise. + :rtype: bool + """ + raise NotImplementedError("Child classes must implement this method") + + @abstractmethod + def _is_cluster_stable(self) -> bool: + """ + Abstract method to check if the cluster is in a stable state. + To be implemented by child classes. + + :raises NotImplementedError: If the method is not implemented in a child class. 
+ :return: True if the cluster is stable, False otherwise. + :rtype: bool + """ + raise NotImplementedError("Child classes must implement this method") + def _get_stonith_action(self) -> None: """ Retrieves the stonith action from the system. @@ -63,7 +103,7 @@ def _get_stonith_action(self) -> None: except Exception as ex: self.log(logging.WARNING, f"Failed to get stonith action: {str(ex)}") - def _validate_cluster_basic_status(self, cluster_status_xml: ET.Element): + def _validate_cluster_basic_status(self, cluster_status_xml: ET.Element) -> None: """ Validate the basic status of the cluster. @@ -76,28 +116,32 @@ def _validate_cluster_basic_status(self, cluster_status_xml: ET.Element): self.result["pacemaker_status"] = "stopped" self.log(logging.INFO, f"Pacemaker status: {self.result['pacemaker_status']}") - if int(cluster_status_xml.find("summary").find("nodes_configured").attrib["number"]) < 2: - self.result["message"] = "Pacemaker cluster isn't stable (insufficient nodes)" + summary = cluster_status_xml.find("summary") + if summary is None: + self.log(logging.ERROR, "Cluster status summary not found in XML") + return + + nodes_configured = summary.find("nodes_configured") + if nodes_configured is None: + self.log(logging.ERROR, "Nodes configured not found in cluster status summary") + return + + if int(nodes_configured.attrib.get("number", 0)) < 2: + self.result["message"] = ( + "Pacemaker cluster isn't stable (insufficient nodes configured)" + ) self.log(logging.WARNING, self.result["message"]) nodes = cluster_status_xml.find("nodes") + if nodes is None: + self.log(logging.ERROR, "Nodes not found in cluster status XML") + return + for node in nodes: if node.attrib["online"] != "true": self.result["message"] = f"Node {node.attrib['name']} is not online" self.log(logging.WARNING, self.result["message"]) - def _process_node_attributes(self, cluster_status_xml: ET.Element) -> Dict[str, Any]: - """ - Abstract method to process node attributes.
- - :param node_attributes: XML element containing node attributes. - :type node_attributes: ET.Element - :raises NotImplementedError: If the method is not implemented in a child class. - :return: Dictionary with node attributes. - :rtype: Dict[str, Any] - """ - raise NotImplementedError("Child classes must implement this method") - def run(self) -> Dict[str, str]: """ Run the cluster status check. @@ -128,25 +172,3 @@ def run(self) -> Dict[str, str]: self.result["status"] = TestStatus.SUCCESS.value self.log(logging.INFO, "Cluster status check completed") return self.result - - def _is_cluster_ready(self) -> bool: - """ - Abstract method to check if the cluster is ready. - To be implemented by child classes. - - :raises NotImplementedError: If the method is not implemented in a child class. - :return: True if the cluster is ready, False otherwise. - :rtype: bool - """ - raise NotImplementedError("Child classes must implement this method") - - def _is_cluster_stable(self) -> bool: - """ - Abstract method to check if the cluster is in a stable state. - To be implemented by child classes. - - :raises NotImplementedError: If the method is not implemented in a child class. - :return: True if the cluster is ready, False otherwise. - :rtype: bool - """ - raise NotImplementedError("Child classes must implement this method") diff --git a/src/module_utils/sap_automation_qa.py b/src/module_utils/sap_automation_qa.py index 7b2fa9c7..41fb4091 100644 --- a/src/module_utils/sap_automation_qa.py +++ b/src/module_utils/sap_automation_qa.py @@ -4,63 +4,16 @@ """ from abc import ABC -from enum import Enum import sys import logging import subprocess from typing import Optional, Dict, Any import xml.etree.ElementTree as ET - -class TelemetryDataDestination(Enum): - """ - Enum for the destination of the telemetry data. - """ - - KUSTO = "azuredataexplorer" - LOG_ANALYTICS = "azureloganalytics" - - -class TestStatus(Enum): - """ - Enum for the status of the test case/step. 
- """ - - SUCCESS = "PASSED" - ERROR = "FAILED" - WARNING = "WARNING" - INFO = "INFO" - NOT_STARTED = "NOT_STARTED" - - -class Parameters: - """ - This class is used to store the parameters for the test case - """ - - def __init__(self, category, id, name, value, expected_value, status): - self.category = category - self.id = id - self.name = name - self.value = value - self.expected_value = expected_value - self.status = status - - def to_dict(self) -> Dict[str, Any]: - """ - This method is used to convert the parameters to a dictionary - - :return: Dictionary containing the parameters - :rtype: Dict[str, Any] - """ - return { - "category": self.category, - "id": self.id, - "name": self.name, - "value": self.value, - "expected_value": self.expected_value, - "status": self.status, - } +try: + from ansible.module_utils.enums import Result, TestStatus +except ImportError: + from src.module_utils.enums import Result, TestStatus class SapAutomationQA(ABC): @@ -71,13 +24,7 @@ class SapAutomationQA(ABC): def __init__(self): self.logger = self.setup_logger() - self.result = { - "status": TestStatus.NOT_STARTED.value, - "message": "", - "details": [], - "logs": [], - "changed": False, - } + self.result = Result().to_dict() def setup_logger(self) -> logging.Logger: """ @@ -107,7 +54,7 @@ def log(self, level: int, message: str): message.replace("\n", " ") self.result["logs"].append(message) - def handle_error(self, exception: Exception, stderr: str = None): + def handle_error(self, exception: Exception, stderr: str = ""): """ Handles command execution errors by logging and updating the result dictionary. 
@@ -125,7 +72,7 @@ def handle_error(self, exception: Exception, stderr: str = None): self.result["message"] = error_message self.result["logs"].append(error_message) - def execute_command_subprocess(self, command: str, shell_command: bool = False) -> str: + def execute_command_subprocess(self, command: Any, shell_command: bool = False) -> str: """ Executes a shell command using subprocess with a timeout and logs output or errors. diff --git a/src/modules/check_indexserver.py b/src/modules/check_indexserver.py index 7f52a235..bdfe3ee0 100644 --- a/src/modules/check_indexserver.py +++ b/src/modules/check_indexserver.py @@ -7,11 +7,14 @@ import logging from ansible.module_utils.basic import AnsibleModule +from ansible.module_utils.facts.compat import ansible_facts try: - from ansible.module_utils.sap_automation_qa import SapAutomationQA, TestStatus + from ansible.module_utils.sap_automation_qa import SapAutomationQA + from ansible.module_utils.enums import TestStatus, OperatingSystemFamily except ImportError: - from src.module_utils.sap_automation_qa import SapAutomationQA, TestStatus + from src.module_utils.sap_automation_qa import SapAutomationQA + from src.module_utils.enums import TestStatus, OperatingSystemFamily DOCUMENTATION = r""" --- @@ -27,11 +30,6 @@ - SAP HANA database SID type: str required: true - ansible_os_family: - description: - - Operating system distribution (e.g., 'redhat' or 'suse') - type: str - required: true author: - Microsoft Corporation notes: @@ -45,7 +43,6 @@ - name: Check if SAP HANA indexserver is configured check_indexserver: database_sid: "HDB" - ansible_os_family: "{{ ansible_os_family|lower }}" register: indexserver_result - name: Display indexserver check results @@ -92,7 +89,7 @@ class IndexServerCheck(SapAutomationQA): :type os_distribution: str """ - def __init__(self, database_sid: str, os_distribution: str): + def __init__(self, database_sid: str, os_distribution: OperatingSystemFamily): super().__init__() self.database_sid = 
database_sid self.os_distribution = os_distribution @@ -102,7 +99,7 @@ def check_indexserver(self) -> None: Checks if the indexserver is configured. """ expected_properties = { - "redhat": [ + OperatingSystemFamily.REDHAT: [ { "[ha_dr_provider_chksrv]": { "provider": "ChkSrv", @@ -116,7 +113,7 @@ def check_indexserver(self) -> None: } }, ], - "suse": [ + OperatingSystemFamily.SUSE: [ { "[ha_dr_provider_suschksrv]": { "provider": "susChkSrv", @@ -129,6 +126,12 @@ def check_indexserver(self) -> None: "path": "/hana/shared/myHooks", } }, + { + "[ha_dr_provider_suschksrv]": { + "provider": "susChkSrv", + "path": "/usr/share/SAPHanaSR-angi", + } + }, ], } @@ -211,14 +214,18 @@ def main(): module = AnsibleModule( argument_spec=dict( database_sid=dict(type="str", required=True), - ansible_os_family=dict(type="str", required=True), + filter=dict(type="str", required=False, default="os_family"), ) ) database_sid = module.params["database_sid"] - os_distribution = module.params["ansible_os_family"] - index_server_check = IndexServerCheck(database_sid, os_distribution) + index_server_check = IndexServerCheck( + database_sid=database_sid, + os_distribution=OperatingSystemFamily( + str(ansible_facts(module).get("os_family", "SUSE")).upper() + ), + ) index_server_check.check_indexserver() module.exit_json(**index_server_check.get_result()) diff --git a/src/modules/filesystem_freeze.py b/src/modules/filesystem_freeze.py index 17bbcd0a..86e28743 100644 --- a/src/modules/filesystem_freeze.py +++ b/src/modules/filesystem_freeze.py @@ -129,7 +129,7 @@ def _find_filesystem(self) -> Tuple[str, str]: return parts[0], "/hana/shared" except FileNotFoundError as ex: self.handle_error(ex) - return None, None + return "", "" def run(self) -> Dict[str, Any]: """ diff --git a/src/modules/get_azure_lb.py b/src/modules/get_azure_lb.py index 14ff0df0..73c3f159 100644 --- a/src/modules/get_azure_lb.py +++ b/src/modules/get_azure_lb.py @@ -13,17 +13,11 @@ from ansible.module_utils.basic import 
AnsibleModule try: - from ansible.module_utils.sap_automation_qa import ( - SapAutomationQA, - TestStatus, - Parameters, - ) + from ansible.module_utils.sap_automation_qa import SapAutomationQA + from ansible.module_utils.enums import TestStatus, Parameters except ImportError: - from src.module_utils.sap_automation_qa import ( - SapAutomationQA, - TestStatus, - Parameters, - ) + from src.module_utils.sap_automation_qa import SapAutomationQA + from src.module_utils.enums import TestStatus, Parameters DOCUMENTATION = r""" --- @@ -174,7 +168,7 @@ def __init__(self, module_params: Dict): self.network_client = None self.constants = module_params["constants"].get("AZURE_LOADBALANCER", {}) - def _create_network_client(self): + def _create_network_client(self) -> bool: """ Create the network client object. """ @@ -188,11 +182,13 @@ def _create_network_client(self): self.network_client = NetworkManagementClient( self.credential, self.module_params["subscription_id"] ) + return True except Exception as ex: self.handle_error(ex) self.result["message"] += ( " Failed to authenticate to Azure to read the Load " + f"Balancer Details. {ex} \n" ) + return False def get_load_balancers(self) -> list: """ @@ -202,23 +198,24 @@ def get_load_balancers(self) -> list: :rtype: list """ try: + if self.network_client is None: + return [] + load_balancers = self.network_client.load_balancers.list_all() return [ lb.as_dict() for lb in load_balancers - if lb.location.lower() == self.module_params["region"].lower() + if str(lb.location).lower() == self.module_params["region"].lower() ] except Exception as ex: self.handle_error(ex) self.result["message"] += f" Failed to get load balancers. {ex} \n" + return [] - def get_load_balancers_details(self) -> dict: + def get_load_balancers_details(self) -> None: """ Get the details of the load balancers in a specific resource group. - - :return: Dictionary containing the result of the test case. 
- :rtype: dict """ self._create_network_client() diff --git a/src/modules/get_cluster_status_db.py b/src/modules/get_cluster_status_db.py index 22462283..949f6e58 100644 --- a/src/modules/get_cluster_status_db.py +++ b/src/modules/get_cluster_status_db.py @@ -5,16 +5,20 @@ Python script to get and validate the status of a HANA cluster. """ +import logging import xml.etree.ElementTree as ET from typing import Dict, Any from ansible.module_utils.basic import AnsibleModule +from ansible.module_utils.facts.compat import ansible_facts try: from ansible.module_utils.get_cluster_status import BaseClusterStatusChecker + from ansible.module_utils.enums import OperatingSystemFamily, HanaSRProvider from ansible.module_utils.commands import AUTOMATED_REGISTER except ImportError: from src.module_utils.get_cluster_status import BaseClusterStatusChecker from src.module_utils.commands import AUTOMATED_REGISTER + from src.module_utils.enums import OperatingSystemFamily, HanaSRProvider DOCUMENTATION = r""" @@ -37,11 +41,11 @@ - SAP HANA database SID type: str required: true - ansible_os_family: + saphanasr_provider: description: - - Operating system family (redhat, suse, etc.) + - The SAP HANA system replication provider type type: str - required: false + required: true author: - Microsoft Corporation notes: @@ -58,7 +62,7 @@ get_cluster_status_db: operation_step: "check_cluster" database_sid: "HDB" - ansible_os_family: "{{ ansible_os_family|lower }}" + saphanasr_provider: "SAPHanaSR" register: cluster_result - name: Display cluster status @@ -131,9 +135,15 @@ class HanaClusterStatusChecker(BaseClusterStatusChecker): Class to check the status of a pacemaker cluster in a SAP HANA environment. 
""" - def __init__(self, database_sid: str, ansible_os_family: str = ""): + def __init__( + self, + database_sid: str, + saphanasr_provider: HanaSRProvider, + ansible_os_family: OperatingSystemFamily, + ): super().__init__(ansible_os_family) self.database_sid = database_sid + self.saphanasr_provider = saphanasr_provider self.result.update( { "primary_node": "", @@ -173,44 +183,60 @@ def _process_node_attributes(self, cluster_status_xml: ET.Element) -> Dict[str, "primary_site_name": "", } node_attributes = cluster_status_xml.find("node_attributes") - attribute_map = { - f"hana_{self.database_sid}_op_mode": "operation_mode", - f"hana_{self.database_sid}_srmode": "replication_mode", + if node_attributes is None: + self.log( + logging.ERROR, + "No node attributes found in the cluster status XML.", + ) + return result + + providers = { + HanaSRProvider.SAPHANASR: { + "clone_attr": f"hana_{self.database_sid}_clone_state", + "sync_attr": f"hana_{self.database_sid}_sync_state", + "primary": {"clone": "PROMOTED", "sync": "PRIM"}, + "secondary": {"clone": "DEMOTED", "sync": "SOK"}, + }, + HanaSRProvider.ANGI: { + "clone_attr": f"hana_{self.database_sid}_clone_state", + "sync_attr": f"master-rsc_SAPHanaCon_{self.database_sid.upper()}_HDB00", + "primary": {"clone": "PROMOTED", "sync": "150"}, + "secondary": {"clone": "DEMOTED", "sync": "100"}, + }, } + provider_config = providers.get( + self.saphanasr_provider, providers[HanaSRProvider.SAPHANASR] + ) for node in node_attributes: node_name = node.attrib["name"] - node_states = {} - node_attributes_dict = {} - - for attribute in node: - attr_name = attribute.attrib["name"] - attr_value = attribute.attrib["value"] - node_attributes_dict[attr_name] = attr_value - - if attr_name in attribute_map: - result[attribute_map[attr_name]] = attr_value - - if attr_name == f"hana_{self.database_sid}_clone_state": - node_states["clone_state"] = attr_value - elif attr_name == f"hana_{self.database_sid}_sync_state": - node_states["sync_state"] 
= attr_value - + attrs = {attr.attrib["name"]: attr.attrib["value"] for attr in node} + result["operation_mode"] = attrs.get( + f"hana_{self.database_sid}_op_mode", result["operation_mode"] + ) + result["replication_mode"] = attrs.get( + f"hana_{self.database_sid}_srmode", result["replication_mode"] + ) + clone_state = attrs.get(provider_config["clone_attr"], "") + sync_state = attrs.get(provider_config["sync_attr"], "") if ( - node_states.get("clone_state") == "PROMOTED" - and node_states.get("sync_state") == "PRIM" + clone_state == provider_config["primary"]["clone"] + and sync_state == provider_config["primary"]["sync"] ): - result["primary_node"] = node_name - result["cluster_status"]["primary"] = node_attributes_dict - result["primary_site_name"] = node_attributes_dict.get( - f"hana_{self.database_sid}_site", "" + result.update( + { + "primary_node": node_name, + "primary_site_name": attrs.get(f"hana_{self.database_sid}_site", ""), + } ) + result["cluster_status"]["primary"] = attrs + elif ( - node_states.get("clone_state") == "DEMOTED" - and node_states.get("sync_state") == "SOK" + clone_state == provider_config["secondary"]["clone"] + and sync_state == provider_config["secondary"]["sync"] ): result["secondary_node"] = node_name - result["cluster_status"]["secondary"] = node_attributes_dict + result["cluster_status"]["secondary"] = attrs self.result.update(result) return result @@ -252,14 +278,18 @@ def run_module() -> None: module_args = dict( operation_step=dict(type="str", required=True), database_sid=dict(type="str", required=True), - ansible_os_family=dict(type="str", required=False), + saphanasr_provider=dict(type="str", required=True), + filter=dict(type="str", required=False, default="os_family"), ) module = AnsibleModule(argument_spec=module_args, supports_check_mode=True) checker = HanaClusterStatusChecker( database_sid=module.params["database_sid"], - ansible_os_family=module.params["ansible_os_family"], + 
saphanasr_provider=HanaSRProvider(module.params["saphanasr_provider"]), + ansible_os_family=OperatingSystemFamily( + str(ansible_facts(module).get("os_family", "SUSE")).upper() + ), ) checker.run() diff --git a/src/modules/get_cluster_status_scs.py b/src/modules/get_cluster_status_scs.py index 76c2cf75..03f2be33 100644 --- a/src/modules/get_cluster_status_scs.py +++ b/src/modules/get_cluster_status_scs.py @@ -9,15 +9,18 @@ import xml.etree.ElementTree as ET from typing import Dict, Any from ansible.module_utils.basic import AnsibleModule +from ansible.module_utils.facts.compat import ansible_facts try: from ansible.module_utils.get_cluster_status import BaseClusterStatusChecker from ansible.module_utils.commands import CIB_ADMIN + from ansible.module_utils.enums import OperatingSystemFamily except ImportError: from src.module_utils.get_cluster_status import BaseClusterStatusChecker from src.module_utils.commands import ( CIB_ADMIN, ) + from src.module_utils.enums import OperatingSystemFamily DOCUMENTATION = r""" @@ -36,12 +39,6 @@ - Used to identify the specific ASCS and ERS resources. type: str required: true - ansible_os_family: - description: - - Operating system family (e.g., redhat, suse). - - Used to determine OS-specific commands and configurations. 
- type: str - required: false author: - Microsoft Corporation notes: @@ -58,7 +55,6 @@ - name: Check SAP SCS cluster status get_cluster_status_scs: sap_sid: "S4D" - ansible_os_family: "{{ ansible_os_family|lower }}" register: cluster_result - name: Display SCS cluster status @@ -118,7 +114,7 @@ class SCSClusterStatusChecker(BaseClusterStatusChecker): def __init__( self, sap_sid: str, - ansible_os_family: str = "", + ansible_os_family: OperatingSystemFamily, ): super().__init__(ansible_os_family) self.sap_sid = sap_sid @@ -285,14 +281,15 @@ def run_module() -> None: """ module_args = dict( sap_sid=dict(type="str", required=True), - ansible_os_family=dict(type="str", required=False), + filter=dict(type="str", required=False, default="os_family"), ) module = AnsibleModule(argument_spec=module_args, supports_check_mode=True) + ansible_os_family = str(ansible_facts(module).get("os_family", "SUSE")).upper() checker = SCSClusterStatusChecker( sap_sid=module.params["sap_sid"], - ansible_os_family=module.params["ansible_os_family"], + ansible_os_family=OperatingSystemFamily(ansible_os_family), ) checker.run() diff --git a/src/modules/get_pcmk_properties_db.py b/src/modules/get_pcmk_properties_db.py index 5a67e85a..0ffdff64 100644 --- a/src/modules/get_pcmk_properties_db.py +++ b/src/modules/get_pcmk_properties_db.py @@ -11,21 +11,22 @@ HAClusterValidator: Main validator class for cluster configurations. 
""" +import logging from ansible.module_utils.basic import AnsibleModule +from ansible.module_utils.facts.compat import ansible_facts try: - from ansible.module_utils.sap_automation_qa import ( - SapAutomationQA, - TestStatus, + from ansible.module_utils.sap_automation_qa import SapAutomationQA + from ansible.module_utils.enums import ( + OperatingSystemFamily, Parameters, + TestStatus, + HanaSRProvider, ) from ansible.module_utils.commands import CIB_ADMIN except ImportError: - from src.module_utils.sap_automation_qa import ( - SapAutomationQA, - TestStatus, - Parameters, - ) + from src.module_utils.sap_automation_qa import SapAutomationQA + from src.module_utils.enums import OperatingSystemFamily, Parameters, TestStatus, HanaSRProvider from src.module_utils.commands import CIB_ADMIN DOCUMENTATION = r""" @@ -48,11 +49,6 @@ - SAP HANA instance number type: str required: true - ansible_os_family: - description: - - Operating system family (redhat, suse, etc.) - type: str - required: true virtual_machine_name: description: - Name of the virtual machine @@ -73,6 +69,11 @@ - Dictionary of constants for validation type: dict required: true + saphanasr_provider: + description: + - SAP HANA SR provider type (e.g., SAPHanaSR, SAPHanaSR-angi) + type: str + required: true author: - Microsoft Corporation notes: @@ -89,7 +90,6 @@ get_pcmk_properties_db: sid: "HDB" instance_number: "00" - ansible_os_family: "{{ ansible_os_family|lower }}" virtual_machine_name: "{{ ansible_hostname }}" fencing_mechanism: "sbd" os_version: "{{ ansible_distribution_version }}" @@ -180,27 +180,31 @@ class HAClusterValidator(SapAutomationQA): "sbd_stonith": ".//primitive[@type='external/sbd']", "fence_agent": ".//primitive[@type='fence_azure_arm']", "topology": ".//clone/primitive[@type='SAPHanaTopology']", + "angi_topology": ".//clone/primitive[@type='SAPHanaTopology']", "topology_meta": ".//clone/meta_attributes", "hana": ".//master/primitive[@type='SAPHana']", "hana_meta": 
".//master/meta_attributes", "ipaddr": ".//primitive[@type='IPaddr2']", "filesystem": ".//primitive[@type='Filesystem']", "azurelb": ".//primitive[@type='azure-lb']", + "angi_filesystem": ".//primitive[@type='SAPHanaFilesystem']", + "angi_hana": ".//primitive[@type='SAPHanaController']", } def __init__( self, - os_type, - os_version, - sid, - instance_number, - fencing_mechanism, - virtual_machine_name, - constants, + os_type: OperatingSystemFamily, + os_version: str, + sid: str, + instance_number: str, + fencing_mechanism: str, + virtual_machine_name: str, + constants: dict, + saphanasr_provider: HanaSRProvider, category=None, ): super().__init__() - self.os_type = os_type + self.os_type = os_type.value.upper() self.os_version = os_version self.category = category self.sid = sid @@ -208,6 +212,7 @@ def __init__( self.fencing_mechanism = fencing_mechanism self.virtual_machine_name = virtual_machine_name self.constants = constants + self.saphanasr_provider = saphanasr_provider self.parse_ha_cluster_config() def _get_expected_value(self, category, name): @@ -396,16 +401,22 @@ def _parse_global_ini_parameters(self): :rtype: list """ parameters = [] - global_ini_defaults = self.constants["GLOBAL_INI"].get(self.os_type, {}) - + global_ini_defaults = ( + self.constants["GLOBAL_INI"] + .get(self.os_type, {}) + .get(self.saphanasr_provider.value, {}) + ) with open( f"/usr/sap/{self.sid}/SYS/global/hdb/custom/config/global.ini", "r", encoding="utf-8", ) as file: global_ini_content = file.read().splitlines() - - section_start = global_ini_content.index("[ha_dr_provider_SAPHanaSR]") + section_start = ( + global_ini_content.index("[ha_dr_provider_sushanasr]") + if self.saphanasr_provider == HanaSRProvider.ANGI + else global_ini_content.index("[ha_dr_provider_SAPHanaSR]") + ) properties_slice = global_ini_content[section_start + 1 : section_start + 4] global_ini_properties = { @@ -420,6 +431,10 @@ def _parse_global_ini_parameters(self): if isinstance(expected_value, list): if 
value in expected_value: expected_value = value + self.log( + logging.INFO, + f"param_name: {param_name}, value: {value}, expected_value: {expected_value}", + ) parameters.append( self._create_parameter( category="global_ini", @@ -569,7 +584,12 @@ def parse_ha_cluster_config(self): elif self.category == "resources": try: - for sub_category, xpath in self.RESOURCE_CATEGORIES.items(): + resource_categories = self.RESOURCE_CATEGORIES.copy() + if self.saphanasr_provider == HanaSRProvider.ANGI: + resource_categories.pop("topology", None) + else: + resource_categories.pop("angi_topology", None) + for sub_category, xpath in resource_categories.items(): elements = root.findall(xpath) for element in elements: parameters.extend(self._parse_resource(element, sub_category)) @@ -620,22 +640,24 @@ def main() -> None: argument_spec=dict( sid=dict(type="str"), instance_number=dict(type="str"), - ansible_os_family=dict(type="str"), virtual_machine_name=dict(type="str"), fencing_mechanism=dict(type="str"), os_version=dict(type="str"), pcmk_constants=dict(type="dict"), + saphanasr_provider=dict(type="str"), + filter=dict(type="str", required=False, default="os_family"), ) ) validator = HAClusterValidator( - os_type=module.params["ansible_os_family"], + os_type=OperatingSystemFamily(str(ansible_facts(module).get("os_family", "SUSE")).upper()), os_version=module.params["os_version"], instance_number=module.params["instance_number"], sid=module.params["sid"], virtual_machine_name=module.params["virtual_machine_name"], fencing_mechanism=module.params["fencing_mechanism"], constants=module.params["pcmk_constants"], + saphanasr_provider=HanaSRProvider(module.params["saphanasr_provider"]), ) module.exit_json(**validator.get_result()) diff --git a/src/modules/get_pcmk_properties_scs.py b/src/modules/get_pcmk_properties_scs.py index 6fbbf969..2a031479 100644 --- a/src/modules/get_pcmk_properties_scs.py +++ b/src/modules/get_pcmk_properties_scs.py @@ -12,20 +12,15 @@ """ from 
ansible.module_utils.basic import AnsibleModule +from ansible.module_utils.facts.compat import ansible_facts try: - from ansible.module_utils.sap_automation_qa import ( - SapAutomationQA, - TestStatus, - Parameters, - ) + from ansible.module_utils.sap_automation_qa import SapAutomationQA + from ansible.module_utils.enums import OperatingSystemFamily, Parameters, TestStatus from ansible.module_utils.commands import CIB_ADMIN except ImportError: - from src.module_utils.sap_automation_qa import ( - SapAutomationQA, - TestStatus, - Parameters, - ) + from src.module_utils.sap_automation_qa import SapAutomationQA + from src.module_utils.enums import OperatingSystemFamily, Parameters, TestStatus from src.module_utils.commands import CIB_ADMIN @@ -55,11 +50,6 @@ - SAP ERS instance number type: str required: true - ansible_os_family: - description: - - Operating system family (redhat, suse, etc.) - type: str - required: true virtual_machine_name: description: - Name of the virtual machine @@ -98,7 +88,6 @@ sid: "S4D" ascs_instance_number: "00" ers_instance_number: "10" - ansible_os_family: "{{ ansible_os_family|lower }}" virtual_machine_name: "{{ ansible_hostname }}" pcmk_constants: "{{ pcmk_validation_constants }}" fencing_mechanism: "sbd" @@ -194,18 +183,18 @@ class HAClusterValidator(SapAutomationQA): def __init__( self, - os_type, - sid, - scs_instance_number, - ers_instance_number, - virtual_machine_name, - constants, - fencing_mechanism, + os_type: OperatingSystemFamily, + sid: str, + scs_instance_number: str, + ers_instance_number: str, + virtual_machine_name: str, + constants: dict, + fencing_mechanism: str, nfs_provider=None, category=None, ): super().__init__() - self.os_type = os_type + self.os_type = os_type.value.upper() self.category = category self.sid = sid self.scs_instance_number = scs_instance_number @@ -586,11 +575,11 @@ def main() -> None: sid=dict(type="str"), ascs_instance_number=dict(type="str"), ers_instance_number=dict(type="str"), - 
ansible_os_family=dict(type="str"), virtual_machine_name=dict(type="str"), pcmk_constants=dict(type="dict"), fencing_mechanism=dict(type="str"), nfs_provider=dict(type="str", default=""), + filter=dict(type="str", required=False, default="os_family"), ) ) @@ -598,7 +587,7 @@ def main() -> None: sid=module.params["sid"], scs_instance_number=module.params["ascs_instance_number"], ers_instance_number=module.params["ers_instance_number"], - os_type=module.params["ansible_os_family"], + os_type=OperatingSystemFamily(str(ansible_facts(module).get("os_family", "SUSE")).upper()), virtual_machine_name=module.params["virtual_machine_name"], constants=module.params["pcmk_constants"], fencing_mechanism=module.params["fencing_mechanism"], diff --git a/src/modules/location_constraints.py b/src/modules/location_constraints.py index c5bb3ff6..b8913acb 100644 --- a/src/modules/location_constraints.py +++ b/src/modules/location_constraints.py @@ -8,13 +8,16 @@ import xml.etree.ElementTree as ET from typing import List from ansible.module_utils.basic import AnsibleModule +from ansible.module_utils.facts.compat import ansible_facts try: - from ansible.module_utils.sap_automation_qa import SapAutomationQA, TestStatus + from ansible.module_utils.sap_automation_qa import SapAutomationQA from ansible.module_utils.commands import RSC_CLEAR, CONSTRAINTS + from ansible.module_utils.enums import OperatingSystemFamily, TestStatus except ImportError: - from src.module_utils.sap_automation_qa import SapAutomationQA, TestStatus + from src.module_utils.sap_automation_qa import SapAutomationQA from src.module_utils.commands import RSC_CLEAR, CONSTRAINTS + from src.module_utils.enums import OperatingSystemFamily, TestStatus DOCUMENTATION = r""" @@ -33,12 +36,6 @@ type: str required: true choices: ['remove'] - ansible_os_family: - description: - - Operating system family (redhat, suse, etc.) 
- - Used to determine the appropriate command format for the OS - type: str - required: true author: - Microsoft Corporation notes: @@ -54,7 +51,6 @@ - name: Remove all location constraints location_constraints: action: "remove" - ansible_os_family: "{{ ansible_os_family|lower }}" register: constraints_result - name: Display constraint removal results @@ -96,7 +92,7 @@ class LocationConstraintsManager(SapAutomationQA): Class to manage the location constraints in a pacemaker cluster. """ - def __init__(self, ansible_os_family: str): + def __init__(self, ansible_os_family: OperatingSystemFamily): super().__init__() self.ansible_os_family = ansible_os_family self.result.update( @@ -139,7 +135,8 @@ def location_constraints_exists(self) -> List[ET.Element]: self.result["details"] = xml_output return ET.fromstring(xml_output).findall(".//rsc_location") if xml_output else [] except Exception as ex: - self.handle_exception(ex) + self.handle_error(ex) + return [] def run_module() -> None: @@ -149,14 +146,17 @@ def run_module() -> None: """ module_args = dict( action=dict(type="str", required=True), - ansible_os_family=dict(type="str", required=True), + filter=dict(type="str", required=False, default="os_family"), ) module = AnsibleModule(argument_spec=module_args, supports_check_mode=True) action = module.params["action"] - ansible_os_family = module.params["ansible_os_family"] - manager = LocationConstraintsManager(ansible_os_family) + manager = LocationConstraintsManager( + ansible_os_family=OperatingSystemFamily( + str(ansible_facts(module).get("os_family", "SUSE")).upper() + ) + ) if module.check_mode: module.exit_json(**manager.get_result()) diff --git a/src/modules/log_parser.py b/src/modules/log_parser.py index 4c87cdf6..ade2a123 100644 --- a/src/modules/log_parser.py +++ b/src/modules/log_parser.py @@ -8,11 +8,14 @@ import json from datetime import datetime from ansible.module_utils.basic import AnsibleModule +from ansible.module_utils.facts.compat import 
ansible_facts try: from ansible.module_utils.sap_automation_qa import SapAutomationQA, TestStatus + from ansible.module_utils.enums import OperatingSystemFamily except ImportError: from src.module_utils.sap_automation_qa import SapAutomationQA, TestStatus + from src.module_utils.enums import OperatingSystemFamily DOCUMENTATION = r""" --- @@ -49,12 +52,6 @@ type: list required: false default: [] - ansible_os_family: - description: - - Operating system family (e.g., REDHAT, SUSE). - - Used to determine the appropriate log timestamp format. - type: str - required: true function: description: - Specifies the function to execute: "parse_logs" or "merge_logs". @@ -85,7 +82,6 @@ start_time: "{{ (ansible_date_time.iso8601 | to_datetime - '1 hour') | to_datetime('%Y-%m-%d %H:%M:%S') }}" end_time: "{{ ansible_date_time.iso8601 | to_datetime('%Y-%m-%d %H:%M:%S') }}" log_file: "/var/log/messages" - ansible_os_family: "{{ ansible_os_family|upper }}" register: parse_result - name: Display filtered log entries @@ -98,7 +94,6 @@ logs: - "[\"Jan 01 12:34:56 server1 pacemaker-controld: Notice: Resource SAPHana_HDB_00 started\"]" - "[\"Jan 01 12:35:00 server2 pacemaker-controld: Notice: Resource SAPHana_HDB_01 started\"]" - ansible_os_family: "REDHAT" register: merge_result - name: Display merged log entries @@ -195,8 +190,8 @@ def __init__( start_time: str, end_time: str, log_file: str, - ansible_os_family: str, - logs: list = None, + ansible_os_family: OperatingSystemFamily, + logs: list = list(), ): super().__init__() self.start_time = start_time @@ -244,13 +239,13 @@ def merge_logs(self) -> None: for log in parsed_logs: try: - if self.ansible_os_family == "REDHAT": + if self.ansible_os_family == OperatingSystemFamily.REDHAT: timestamp_str = " ".join(log.split()[:3]) log_time = datetime.strptime(timestamp_str, "%b %d %H:%M:%S") log_time = log_time.replace(year=datetime.now().year) all_logs.append((log_time, log)) - elif self.ansible_os_family == "SUSE": + elif 
self.ansible_os_family == OperatingSystemFamily.SUSE: timestamp_str = log.split(".")[0] log_time = datetime.strptime(timestamp_str, "%Y-%m-%dT%H:%M:%S") all_logs.append((log_time, log)) @@ -282,12 +277,12 @@ def parse_logs(self) -> None: with open(self.log_file, "r", encoding="utf-8") as file: for line in file: try: - if self.ansible_os_family == "REDHAT": + if self.ansible_os_family == OperatingSystemFamily.REDHAT: log_time = datetime.strptime( " ".join(line.split()[:3]), "%b %d %H:%M:%S" ) log_time = log_time.replace(year=start_dt.year) - elif self.ansible_os_family == "SUSE": + elif self.ansible_os_family == OperatingSystemFamily.SUSE: log_time = datetime.strptime(line.split(".")[0], "%Y-%m-%dT%H:%M:%S") else: continue @@ -323,18 +318,19 @@ def run_module() -> None: end_time=dict(type="str", required=False), log_file=dict(type="str", required=False, default="/var/log/messages"), keywords=dict(type="list", required=False, default=[]), - ansible_os_family=dict(type="str", required=True), function=dict(type="str", required=True, choices=["parse_logs", "merge_logs"]), logs=dict(type="list", required=False, default=[]), + filter=dict(type="str", required=False, default="os_family"), ) module = AnsibleModule(argument_spec=module_args, supports_check_mode=True) - parser = LogParser( start_time=module.params.get("start_time"), end_time=module.params.get("end_time"), log_file=module.params.get("log_file"), - ansible_os_family=module.params["ansible_os_family"], + ansible_os_family=OperatingSystemFamily( + str(ansible_facts(module).get("os_family", "SUSE")).upper() + ), logs=module.params.get("logs"), ) if module.params["function"] == "parse_logs": diff --git a/src/modules/send_telemetry_data.py b/src/modules/send_telemetry_data.py index 668b407c..147c5764 100644 --- a/src/modules/send_telemetry_data.py +++ b/src/modules/send_telemetry_data.py @@ -20,17 +20,11 @@ from ansible.module_utils.basic import AnsibleModule try: - from ansible.module_utils.sap_automation_qa import 
( - SapAutomationQA, - TestStatus, - TelemetryDataDestination, - ) + from ansible.module_utils.sap_automation_qa import SapAutomationQA + from ansible.module_utils.enums import TelemetryDataDestination, TestStatus except ImportError: - from src.module_utils.sap_automation_qa import ( - SapAutomationQA, - TestStatus, - TelemetryDataDestination, - ) + from src.module_utils.sap_automation_qa import SapAutomationQA + from src.module_utils.enums import TelemetryDataDestination, TestStatus DOCUMENTATION = r""" --- @@ -248,9 +242,9 @@ def send_telemetry_data_to_azuredataexplorer(self, telemetry_json_data: str) -> """ import pandas as pd - telemetry_json_data = json.loads(telemetry_json_data) + telemetry_json_dict = json.loads(telemetry_json_data) data_frame = pd.DataFrame( - [telemetry_json_data.values()], columns=telemetry_json_data.keys() + [telemetry_json_dict.values()], columns=telemetry_json_dict.keys() ) ingestion_properties = IngestionProperties( database=self.module_params["adx_database_name"], diff --git a/src/roles/ha_db_hana/tasks/block-network.yml b/src/roles/ha_db_hana/tasks/block-network.yml index e12c87b6..7e4728bb 100644 --- a/src/roles/ha_db_hana/tasks/block-network.yml +++ b/src/roles/ha_db_hana/tasks/block-network.yml @@ -91,7 +91,7 @@ get_cluster_status_db: operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" register: cluster_status_test_execution_primary retries: "{{ default_retries }}" delay: "{{ default_delay }}" @@ -116,7 +116,7 @@ get_cluster_status_db: operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" register: cluster_status_post_primary retries: "{{ default_retries }}" delay: "{{ default_delay }}" @@ -134,7 +134,7 @@ get_cluster_status_db: operation_step: 
"test_execution" database_sid: "{{ db_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" register: cluster_status_test_execution_secondary retries: "{{ default_retries }}" delay: "{{ default_delay }}" @@ -150,7 +150,7 @@ get_cluster_status_db: operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" register: cluster_status_post_secondary retries: "{{ default_retries }}" delay: "{{ default_delay }}" diff --git a/src/roles/ha_db_hana/tasks/files/constants.yaml b/src/roles/ha_db_hana/tasks/files/constants.yaml index 90305953..9d8a8380 100644 --- a/src/roles/ha_db_hana/tasks/files/constants.yaml +++ b/src/roles/ha_db_hana/tasks/files/constants.yaml @@ -99,6 +99,22 @@ RESOURCE_DEFAULTS: interval: ["0", "0s"] timeout: ["300", "300s"] + angi_topology: + meta_attributes: + clone-node-max: "1" + target-role: "Started" + interleave: "true" + operations: + monitor: + interval: ["50", "50s"] + timeout: ["600", "600s"] + start: + interval: ["0", "0s"] + timeout: ["600", "600s"] + stop: + interval: ["0", "0s"] + timeout: ["300", "300s"] + hana: meta_attributes: notify: "true" @@ -124,6 +140,34 @@ RESOURCE_DEFAULTS: monitor: timeout: ["700", "700s"] + angi_hana: + meta_attributes: + notify: "true" + clone-max: "2" + clone-node-max: "1" + target-role: "Started" + interleave: "true" + priority: "100" + instance_attributes: + PREFER_SITE_TAKEOVER: "true" + DUPLICATE_PRIMARY_TIMEOUT: "7200" + AUTOMATED_REGISTER: "true" + operations: + start: + interval: ["0", "0s"] + timeout: ["3600", "3600s"] + stop: + interval: ["0", "0s"] + timeout: ["3600", "3600s"] + promote: + interval: ["0", "0s"] + timeout: ["3600", "3600s"] + demote: + interval: ["0", "0s"] + timeout: ["320", "320s"] + monitor: + timeout: ["700", "700s"] + ipaddr: meta_attributes: target-role: 
"Started" @@ -147,9 +191,28 @@ RESOURCE_DEFAULTS: interval: ["0", "0s"] timeout: ["120", "120s"] + angi_filesystem: + meta_attributes: + clone-node-max: "1" + interleave: "true" + operations: + monitor: + interval: ["120", "120s"] + timeout: ["120", "120s"] + start: + interval: ["0", "0s"] + timeout: ["10", "10s"] + stop: + interval: ["0", "0s"] + timeout: ["20", "20s"] + azurelb: meta_attributes: resource-stickiness: "0" + operations: + monitor: + interval: ["10", "10s"] + timeout: ["20", "20s"] REDHAT: fence_agent: @@ -285,14 +348,19 @@ OS_PARAMETERS: # Reading the global.ini file to get the provider and path for the SAPHanaSR resource agent GLOBAL_INI: SUSE: - provider: "SAPHanaSR" - path: ["/usr/share/SAPHanaSR", "/hana/shared/myHooks"] - execution_order: "1" - + SAPHanaSR: + provider: "SAPHanaSR" + path: ["/usr/share/SAPHanaSR", "/hana/shared/myHooks"] + execution_order: "1" + SAPHanaSR-angi: + provider: "susHanaSR" + path: ["/usr/share/SAPHanaSR", "/hana/shared/myHooks"] + execution_order: "1" REDHAT: - provider: "SAPHanaSR" - path: ["/usr/share/SAPHanaSR/srHook", "/hana/shared/myHooks"] - execution_order: "1" + SAPHanaSR: + provider: "SAPHanaSR" + path: ["/usr/share/SAPHanaSR/srHook", "/hana/shared/myHooks"] + execution_order: "1" # === Azure Load Balancer === diff --git a/src/roles/ha_db_hana/tasks/fs-freeze.yml b/src/roles/ha_db_hana/tasks/fs-freeze.yml index 038a1075..aa00388c 100644 --- a/src/roles/ha_db_hana/tasks/fs-freeze.yml +++ b/src/roles/ha_db_hana/tasks/fs-freeze.yml @@ -57,7 +57,7 @@ get_cluster_status_db: operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" register: cluster_status_test_execution retries: "{{ default_retries }}" delay: "{{ default_delay }}" @@ -72,7 +72,7 @@ get_cluster_status_db: operation_step: "post_failover" database_sid: "{{ db_sid | lower }}" - ansible_os_family: "{{ 
ansible_os_family | upper }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" register: cluster_status_post retries: "{{ default_retries }}" delay: "{{ default_delay }}" diff --git a/src/roles/ha_db_hana/tasks/ha-config.yml b/src/roles/ha_db_hana/tasks/ha-config.yml index 9241d4d5..de3aac75 100644 --- a/src/roles/ha_db_hana/tasks/ha-config.yml +++ b/src/roles/ha_db_hana/tasks/ha-config.yml @@ -21,6 +21,10 @@ become: true become_user: root block: + - name: "Get the SAPHanaSR provider" + when: (ansible_os_family | upper) == "SUSE" + ansible.builtin.include_tasks: "roles/misc/tasks/get-saphanasr-provider.yml" + - name: "Retrieve Virtual Machine name" ansible.builtin.uri: url: http://169.254.169.254/metadata/instance?api-version=2021-02-01 @@ -33,11 +37,11 @@ get_pcmk_properties_db: sid: "{{ db_sid | upper }}" instance_number: "{{ db_instance_number }}" - ansible_os_family: "{{ ansible_os_family | upper }}" virtual_machine_name: "{{ azure_instance_metadata.json.compute.name }}" fencing_mechanism: "{{ database_cluster_type }}" os_version: "{{ ansible_distribution_version }}" pcmk_constants: "{{ lookup('file', 'constants.yaml') | from_yaml }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" register: test_result - name: "Set the test case status to PASSED" diff --git a/src/roles/ha_db_hana/tasks/primary-crash-index.yml b/src/roles/ha_db_hana/tasks/primary-crash-index.yml index e94c5123..841c733b 100644 --- a/src/roles/ha_db_hana/tasks/primary-crash-index.yml +++ b/src/roles/ha_db_hana/tasks/primary-crash-index.yml @@ -19,7 +19,6 @@ become: true check_indexserver: database_sid: "{{ db_sid | upper }}" - ansible_os_family: "{{ ansible_os_family | lower }}" register: index_server_check # /*--------------------------------------------------------------------------- @@ -56,7 +55,7 @@ get_cluster_status_db: operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" 
+ saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" register: cluster_status_test_execution retries: "{{ default_retries }}" delay: "{{ default_delay }}" @@ -69,7 +68,7 @@ get_cluster_status_db: operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" register: cluster_status_test_execution retries: "{{ default_retries }}" delay: "{{ default_delay }}" @@ -117,7 +116,7 @@ get_cluster_status_db: operation_step: "post_failover" database_sid: "{{ db_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" register: cluster_status_post retries: "{{ default_retries }}" delay: "{{ default_delay }}" diff --git a/src/roles/ha_db_hana/tasks/primary-echo-b.yml b/src/roles/ha_db_hana/tasks/primary-echo-b.yml index 76588e11..b54471e8 100644 --- a/src/roles/ha_db_hana/tasks/primary-echo-b.yml +++ b/src/roles/ha_db_hana/tasks/primary-echo-b.yml @@ -47,7 +47,7 @@ get_cluster_status_db: operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" retries: "{{ default_retries }}" delay: "{{ default_delay }}" register: cluster_status_test_execution @@ -60,7 +60,7 @@ get_cluster_status_db: operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" retries: "{{ default_retries }}" delay: "{{ default_delay }}" register: cluster_status_test_execution @@ -104,7 +104,7 @@ get_cluster_status_db: operation_step: "post_failover" database_sid: "{{ db_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" + saphanasr_provider: "{{ saphanasr_provider | 
default('SAPHanaSR') }}" register: cluster_status_post retries: "{{ default_retries }}" delay: "{{ default_delay }}" diff --git a/src/roles/ha_db_hana/tasks/primary-node-crash.yml b/src/roles/ha_db_hana/tasks/primary-node-crash.yml index 616a09bb..e4eb5224 100644 --- a/src/roles/ha_db_hana/tasks/primary-node-crash.yml +++ b/src/roles/ha_db_hana/tasks/primary-node-crash.yml @@ -43,7 +43,7 @@ get_cluster_status_db: operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" register: cluster_status_test_execution retries: "{{ default_retries }}" delay: "{{ default_delay }}" @@ -85,7 +85,7 @@ get_cluster_status_db: operation_step: "post_failover" database_sid: "{{ db_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" register: cluster_status_post retries: "{{ default_retries }}" delay: "{{ default_delay }}" diff --git a/src/roles/ha_db_hana/tasks/primary-node-kill.yml b/src/roles/ha_db_hana/tasks/primary-node-kill.yml index d727fa88..19d8fa26 100644 --- a/src/roles/ha_db_hana/tasks/primary-node-kill.yml +++ b/src/roles/ha_db_hana/tasks/primary-node-kill.yml @@ -44,7 +44,7 @@ get_cluster_status_db: operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" register: cluster_status_test_execution retries: "{{ default_retries }}" delay: "{{ default_delay }}" @@ -59,7 +59,7 @@ get_cluster_status_db: operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" register: cluster_status_test_execution retries: "{{ default_retries }}" delay: "{{ default_delay }}" @@ -102,7 +102,7 @@ 
get_cluster_status_db: operation_step: "post_failover" database_sid: "{{ db_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" register: cluster_status_post retries: "{{ default_retries }}" delay: "{{ default_delay }}" diff --git a/src/roles/ha_db_hana/tasks/resource-migration.yml b/src/roles/ha_db_hana/tasks/resource-migration.yml index d35c3f9e..5afc7fdd 100644 --- a/src/roles/ha_db_hana/tasks/resource-migration.yml +++ b/src/roles/ha_db_hana/tasks/resource-migration.yml @@ -31,7 +31,28 @@ test_execution_start: "{{ now(utc=true, fmt='%Y-%m-%d %H:%M:%S') }}" test_execution_hostname: "{{ hostvars[cluster_status_pre.primary_node].ansible_hostname }}" + - name: "Test Execution: Get HANA resource id for saphanasr_angi" + block: + - name: "Test Execution: Get HANA resource id for saphanasr_angi" + when: saphanasr_provider | default('SAPHanaSR') == "SAPHanaSR-angi" + ansible.builtin.shell: >- + set -o pipefail && {{ commands + | selectattr('name','equalto','get_hana_resource_id_saphanasr_angi') + | map(attribute=(ansible_os_family|upper)) + | first + }} + args: + executable: /bin/bash + changed_when: false + register: hana_resource_id + failed_when: hana_resource_id.rc != 0 + + - name: "Test Execution: Set fact the hana_resource_name" + ansible.builtin.set_fact: + hana_resource_name: "{{ hana_resource_id.stdout }}" + - name: "Test Execution: Get HANA resource id" + when: saphanasr_provider | default('SAPHanaSR') == "SAPHanaSR" block: - name: "Try master resource ID" ansible.builtin.shell: >- @@ -81,7 +102,7 @@ get_cluster_status_db: operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" register: cluster_status_test_execution retries: "{{ default_retries }}" delay: "{{ default_delay }}" @@ -123,7 +144,6 @@ - name: "Test Execution: Remove 
any location_constraints" location_constraints: action: "remove" - ansible_os_family: "{{ ansible_os_family | upper}}" register: location_constraints_result # This is required because the cluster reports incorrect location constraints @@ -136,7 +156,7 @@ get_cluster_status_db: operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" register: cluster_status_test_execution_1 retries: "{{ default_retries }}" delay: "{{ default_delay }}" diff --git a/src/roles/ha_db_hana/tasks/sbd-fencing.yml b/src/roles/ha_db_hana/tasks/sbd-fencing.yml index 1ea81653..9772fe8c 100644 --- a/src/roles/ha_db_hana/tasks/sbd-fencing.yml +++ b/src/roles/ha_db_hana/tasks/sbd-fencing.yml @@ -58,7 +58,7 @@ get_cluster_status_db: operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" retries: "{{ default_retries }}" delay: "{{ default_delay }}" register: cluster_status_test_execution @@ -73,7 +73,7 @@ get_cluster_status_db: operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" register: cluster_status_test_post retries: "{{ default_retries }}" delay: "{{ default_delay }}" diff --git a/src/roles/ha_db_hana/tasks/secondary-crash-index.yml b/src/roles/ha_db_hana/tasks/secondary-crash-index.yml index c7d0a0bf..4b602de9 100644 --- a/src/roles/ha_db_hana/tasks/secondary-crash-index.yml +++ b/src/roles/ha_db_hana/tasks/secondary-crash-index.yml @@ -18,7 +18,6 @@ become: true check_indexserver: database_sid: "{{ db_sid | upper }}" - ansible_os_family: "{{ ansible_os_family | lower }}" register: index_server_check # 
/*--------------------------------------------------------------------------- @@ -55,8 +54,8 @@ - name: "Test Execution: Validate HANA DB cluster status" get_cluster_status_db: operation_step: "test_execution" - ansible_os_family: "{{ ansible_os_family | upper }}" database_sid: "{{ db_sid | lower }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" register: cluster_status_test_execution retries: "{{ default_retries }}" delay: "{{ default_delay }}" @@ -68,7 +67,7 @@ get_cluster_status_db: operation_step: "post_failover" database_sid: "{{ db_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" register: cluster_status_post retries: "{{ default_retries }}" delay: "{{ default_delay }}" diff --git a/src/roles/ha_db_hana/tasks/secondary-echo-b.yml b/src/roles/ha_db_hana/tasks/secondary-echo-b.yml index b6dbb560..d566b718 100644 --- a/src/roles/ha_db_hana/tasks/secondary-echo-b.yml +++ b/src/roles/ha_db_hana/tasks/secondary-echo-b.yml @@ -51,7 +51,7 @@ get_cluster_status_db: operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" retries: "{{ default_retries }}" delay: "{{ default_delay }}" register: cluster_status_test_execution @@ -63,7 +63,7 @@ get_cluster_status_db: operation_step: "post_failover" database_sid: "{{ db_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" register: cluster_status_post retries: "{{ default_retries }}" delay: "{{ default_delay }}" diff --git a/src/roles/ha_db_hana/tasks/secondary-node-kill.yml b/src/roles/ha_db_hana/tasks/secondary-node-kill.yml index 86e24ad5..d5c4c4fd 100644 --- a/src/roles/ha_db_hana/tasks/secondary-node-kill.yml +++ b/src/roles/ha_db_hana/tasks/secondary-node-kill.yml @@ -48,8 
+48,8 @@ - name: "Test Execution: Validate HANA DB cluster status" get_cluster_status_db: operation_step: "test_execution" - ansible_os_family: "{{ ansible_os_family | upper }}" database_sid: "{{ db_sid | lower }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" register: cluster_status_test_execution retries: "{{ default_retries }}" delay: "{{ default_delay }}" @@ -59,9 +59,9 @@ - name: "Test execution: Validate HANA DB cluster status 2" get_cluster_status_db: - operation_step: "post_failover" - ansible_os_family: "{{ ansible_os_family | upper }}" + operation_step: "post_failover" database_sid: "{{ db_sid | lower }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" register: cluster_status_post retries: "{{ default_retries }}" delay: "{{ default_delay }}" diff --git a/src/roles/ha_scs/tasks/ascs-migration.yml b/src/roles/ha_scs/tasks/ascs-migration.yml index 6844c7b8..c3891986 100644 --- a/src/roles/ha_scs/tasks/ascs-migration.yml +++ b/src/roles/ha_scs/tasks/ascs-migration.yml @@ -40,7 +40,6 @@ - name: "Test Execution: Validate SCS cluster status" get_cluster_status_scs: sap_sid: "{{ sap_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" register: cluster_status_test_execution retries: "{{ default_retries }}" delay: "{{ default_delay }}" diff --git a/src/roles/ha_scs/tasks/ascs-node-crash.yml b/src/roles/ha_scs/tasks/ascs-node-crash.yml index 46325d7d..ed74d558 100644 --- a/src/roles/ha_scs/tasks/ascs-node-crash.yml +++ b/src/roles/ha_scs/tasks/ascs-node-crash.yml @@ -49,7 +49,6 @@ - name: "Test Execution: Validate ASCS node has stopped" get_cluster_status_scs: sap_sid: "{{ sap_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" register: cluster_status_test_execution_pre retries: "{{ default_retries }}" delay: "{{ default_delay }}" @@ -59,7 +58,6 @@ when: hostvars[cluster_status_pre.ascs_node].ensa2_check.stdout == "" get_cluster_status_scs: sap_sid: "{{ sap_sid | lower }}" -
ansible_os_family: "{{ ansible_os_family | upper }}" register: cluster_status_test_execution retries: "{{ default_retries }}" delay: "{{ default_delay }}" @@ -71,7 +69,6 @@ when: hostvars[cluster_status_pre.ascs_node].ensa2_check.stdout != "" get_cluster_status_scs: sap_sid: "{{ sap_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" register: cluster_status_test_execution retries: "{{ default_retries }}" delay: "{{ default_delay }}" diff --git a/src/roles/ha_scs/tasks/block-network.yml b/src/roles/ha_scs/tasks/block-network.yml index a561a16a..293865ea 100644 --- a/src/roles/ha_scs/tasks/block-network.yml +++ b/src/roles/ha_scs/tasks/block-network.yml @@ -87,7 +87,6 @@ - name: "Test Execution: Validate SCS cluster status" get_cluster_status_scs: sap_sid: "{{ sap_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" register: cluster_status_test_execution retries: "{{ default_retries }}" delay: "{{ default_delay }}" @@ -118,7 +117,6 @@ - name: "Test Execution: Validate SCS cluster status" get_cluster_status_scs: sap_sid: "{{ sap_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" register: cluster_status_test_execution retries: "{{ default_retries }}" delay: "{{ default_delay }}" diff --git a/src/roles/ha_scs/tasks/ha-config.yml b/src/roles/ha_scs/tasks/ha-config.yml index 6a846045..3136f21b 100644 --- a/src/roles/ha_scs/tasks/ha-config.yml +++ b/src/roles/ha_scs/tasks/ha-config.yml @@ -24,7 +24,6 @@ sid: "{{ sap_sid | upper }}" ascs_instance_number: "{{ scs_instance_number }}" ers_instance_number: "{{ ers_instance_number }}" - ansible_os_family: "{{ ansible_os_family | upper }}" virtual_machine_name: "{{ azure_instance_metadata.json.compute.name }}" pcmk_constants: "{{ lookup('file', 'constants.yaml') | from_yaml }}" fencing_mechanism: "{{ scs_cluster_type }}" diff --git a/src/roles/ha_scs/tasks/ha-failover-to-node.yml b/src/roles/ha_scs/tasks/ha-failover-to-node.yml index a14c47c3..0f86123d 100644 --- 
a/src/roles/ha_scs/tasks/ha-failover-to-node.yml +++ b/src/roles/ha_scs/tasks/ha-failover-to-node.yml @@ -46,7 +46,6 @@ - name: "Test Execution: Validate SCS cluster status" get_cluster_status_scs: sap_sid: "{{ sap_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" register: cluster_status_test_execution retries: "{{ default_retries }}" delay: "{{ default_delay }}" diff --git a/src/roles/ha_scs/tasks/kill-enqueue-replication.yml b/src/roles/ha_scs/tasks/kill-enqueue-replication.yml index b181f4cb..d99f1009 100644 --- a/src/roles/ha_scs/tasks/kill-enqueue-replication.yml +++ b/src/roles/ha_scs/tasks/kill-enqueue-replication.yml @@ -51,7 +51,6 @@ - name: "Test Execution: Validate ERS node is not running" get_cluster_status_scs: sap_sid: "{{ sap_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" register: cluster_status_test_execution_pre retries: "{{ default_retries }}" delay: "{{ default_delay }}" @@ -65,7 +64,6 @@ - name: "Test Execution: Validate SCS cluster status" get_cluster_status_scs: sap_sid: "{{ sap_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" register: cluster_status_test_execution retries: "{{ default_retries }}" delay: "{{ default_delay }}" diff --git a/src/roles/ha_scs/tasks/kill-enqueue-server.yml b/src/roles/ha_scs/tasks/kill-enqueue-server.yml index 8c0d811d..4fb810dd 100644 --- a/src/roles/ha_scs/tasks/kill-enqueue-server.yml +++ b/src/roles/ha_scs/tasks/kill-enqueue-server.yml @@ -51,7 +51,6 @@ - name: "Test Execution: Validate ASCS node has stopped" get_cluster_status_scs: sap_sid: "{{ sap_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" register: cluster_status_test_execution_pre retries: "{{ default_retries }}" delay: "{{ default_delay }}" @@ -66,7 +65,6 @@ when: ensa2_check.stdout == "0" get_cluster_status_scs: sap_sid: "{{ sap_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" register: cluster_status_test_execution retries: "{{ 
default_retries }}" delay: "{{ default_delay }}" @@ -78,7 +76,6 @@ when: ensa2_check.stdout != "0" get_cluster_status_scs: sap_sid: "{{ sap_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" register: cluster_status_test_execution retries: "{{ default_retries }}" delay: "{{ default_delay }}" diff --git a/src/roles/ha_scs/tasks/kill-message-server.yml b/src/roles/ha_scs/tasks/kill-message-server.yml index dcda2d70..36a4a8ca 100644 --- a/src/roles/ha_scs/tasks/kill-message-server.yml +++ b/src/roles/ha_scs/tasks/kill-message-server.yml @@ -47,7 +47,6 @@ - name: "Test Execution: Validate ASCS node has stopped" get_cluster_status_scs: sap_sid: "{{ sap_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" register: cluster_status_test_execution_pre retries: "{{ default_retries }}" delay: "{{ default_delay }}" @@ -57,7 +56,6 @@ when: ensa2_check.stdout == "" get_cluster_status_scs: sap_sid: "{{ sap_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" register: cluster_status_test_execution retries: "{{ default_retries }}" delay: "{{ default_delay }}" @@ -69,7 +67,6 @@ when: ensa2_check.stdout != "1" get_cluster_status_scs: sap_sid: "{{ sap_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" register: cluster_status_test_execution retries: "{{ default_retries }}" delay: "{{ default_delay }}" diff --git a/src/roles/ha_scs/tasks/kill-sapstartsrv-process.yml b/src/roles/ha_scs/tasks/kill-sapstartsrv-process.yml index f1a63db8..d9703a36 100644 --- a/src/roles/ha_scs/tasks/kill-sapstartsrv-process.yml +++ b/src/roles/ha_scs/tasks/kill-sapstartsrv-process.yml @@ -64,7 +64,6 @@ - name: "Test Execution: Validate SCS cluster status" get_cluster_status_scs: sap_sid: "{{ sap_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" register: cluster_status_test_execution retries: "{{ default_retries }}" delay: "{{ default_delay }}" diff --git a/src/roles/ha_scs/tasks/manual-restart.yml 
b/src/roles/ha_scs/tasks/manual-restart.yml index 580d3f77..66f5b6d2 100644 --- a/src/roles/ha_scs/tasks/manual-restart.yml +++ b/src/roles/ha_scs/tasks/manual-restart.yml @@ -46,7 +46,6 @@ - name: "Test Execution: Validate SCS cluster status 1" get_cluster_status_scs: sap_sid: "{{ sap_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" register: cluster_status_test_execution retries: "{{ default_retries }}" delay: "{{ default_delay }}" @@ -74,7 +73,6 @@ - name: "Test Execution: Validate SCS cluster status 2" get_cluster_status_scs: sap_sid: "{{ sap_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" register: cluster_status_test_execution_2 retries: "{{ default_retries }}" delay: "{{ default_delay }}" diff --git a/src/roles/misc/tasks/cluster-report.yml b/src/roles/misc/tasks/cluster-report.yml index a7d844bd..61fec66a 100644 --- a/src/roles/misc/tasks/cluster-report.yml +++ b/src/roles/misc/tasks/cluster-report.yml @@ -10,12 +10,13 @@ get_cluster_status_db: operation_step: "cluster_report_collection" database_sid: "{{ db_sid | lower | default('') }}" - ansible_os_family: "{{ ansible_os_family | upper }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" register: cluster_status failed_when: cluster_status.primary_node == "" - name: "Get the cluster report from the primary node" become: true + run_once: true when: - cluster_status is defined - cluster_status.primary_node == ansible_hostname diff --git a/src/roles/misc/tasks/get-saphanasr-provider.yml b/src/roles/misc/tasks/get-saphanasr-provider.yml new file mode 100644 index 00000000..135230ea --- /dev/null +++ b/src/roles/misc/tasks/get-saphanasr-provider.yml @@ -0,0 +1,28 @@ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+ +# /*--------------------------------------------------------------------------- +# | Get SAPHanaSR Provider (on SUSE only) | +# +--------------------------------------------------------------------------*/ + +- name: Get SAPHanaSR provider for SUSE + when: (ansible_os_family | upper) == "SUSE" + block: + - name: "Get SAPHanaSR Provider using command" + become: true + become_user: "{{ db_sid | lower }}adm" + ansible.builtin.command: "SAPHanaSR-manageProvider --sid {{ db_sid | upper }} --show --provider=sushanasr" + register: hanasr_command_output + changed_when: false + args: + chdir: "/usr/sbin" + + - name: "Set SAPHanaSR-angi Provider" + when: hanasr_command_output.stdout != "" + ansible.builtin.set_fact: + saphanasr_provider: "SAPHanaSR-angi" + + - name: "Set SAPHanaSR Provider" + when: hanasr_command_output.stdout == "" + ansible.builtin.set_fact: + saphanasr_provider: "SAPHanaSR" diff --git a/src/roles/misc/tasks/post-validations.yml b/src/roles/misc/tasks/post-validations.yml index f3ea15d9..c8a1dcec 100644 --- a/src/roles/misc/tasks/post-validations.yml +++ b/src/roles/misc/tasks/post-validations.yml @@ -34,7 +34,6 @@ - name: "Merge and sort logs from all nodes by timestamp" log_parser: function: "merge_logs" - ansible_os_family: "{{ ansible_os_family | upper }}" logs: - "{{ hostvars[primary_node]['var_log_messages_output'].filtered_logs | default('[]') }}" - "{{ hostvars[secondary_node]['var_log_messages_output'].filtered_logs | default('[]') }}" diff --git a/src/roles/misc/tasks/pre-validations-db.yml b/src/roles/misc/tasks/pre-validations-db.yml index 2f1d1033..86f47929 100644 --- a/src/roles/misc/tasks/pre-validations-db.yml +++ b/src/roles/misc/tasks/pre-validations-db.yml @@ -12,16 +12,23 @@ become: true location_constraints: action: "remove" - ansible_os_family: "{{ ansible_os_family | upper}}" register: location_constraints_results + - name: "Pre validation: Get SAPHanaSR provider" + when: (ansible_os_family | upper) == "SUSE" +
ansible.builtin.include_tasks: "roles/misc/tasks/get-saphanasr-provider.yml" + - name: "Pre Validation: Validate HANA DB cluster status on primary node" become: true get_cluster_status_db: operation_step: "pre_failover" - ansible_os_family: "{{ ansible_os_family | upper }}" database_sid: "{{ db_sid | lower }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" register: cluster_status_pre + until: cluster_status_pre.primary_node != "" or + cluster_status_pre.secondary_node != "" + timeout: 5 + retries: 3 - name: "Pre Validation: CleanUp any failed resource" become: true diff --git a/src/roles/misc/tasks/pre-validations-scs.yml b/src/roles/misc/tasks/pre-validations-scs.yml index 660a8044..0df292c5 100644 --- a/src/roles/misc/tasks/pre-validations-scs.yml +++ b/src/roles/misc/tasks/pre-validations-scs.yml @@ -11,7 +11,6 @@ - name: "Pre Validation: Validate SCS cluster status on ASCS node" get_cluster_status_scs: sap_sid: "{{ sap_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" become: true register: cluster_status_pre diff --git a/src/roles/misc/tasks/rescue.yml b/src/roles/misc/tasks/rescue.yml index 01e4f788..99cf9115 100644 --- a/src/roles/misc/tasks/rescue.yml +++ b/src/roles/misc/tasks/rescue.yml @@ -40,7 +40,6 @@ delegate_to: localhost log_parser: function: "merge_logs" - ansible_os_family: "{{ ansible_os_family | upper }}" logs: - "{{ hostvars[first_node]['var_log_messages_output'].filtered_logs | default('[]') }}" - "{{ hostvars[second_node]['var_log_messages_output'].filtered_logs | default('[]') }}" diff --git a/src/roles/misc/tasks/var-log-messages.yml b/src/roles/misc/tasks/var-log-messages.yml index bbda82b2..aa8ecb0f 100644 --- a/src/roles/misc/tasks/var-log-messages.yml +++ b/src/roles/misc/tasks/var-log-messages.yml @@ -12,6 +12,5 @@ log_parser: start_time: "{{ test_execution_start | default(test_case_start_time_epoch) }}" end_time: "{{ now(utc=true, fmt='%Y-%m-%d %H:%M:%S') }}" - ansible_os_family: "{{ 
ansible_os_family | upper }}" function: "parse_logs" register: var_log_messages_output diff --git a/src/vars/input-api.yaml b/src/vars/input-api.yaml index ce6e527c..5867ebf9 100644 --- a/src/vars/input-api.yaml +++ b/src/vars/input-api.yaml @@ -253,6 +253,9 @@ commands: SUSE: "cibadmin --query --xpath \"//primitive[@type='SAPHana']\" --node-path | grep -oP \"master\\[@id='\\K[^']+\"" REDHAT: "cibadmin --query --xpath \"//primitive[@type='SAPHana']\" --node-path | grep -oP \"clone\\[@id='\\K[^']+\"" + - name: get_hana_resource_id_saphanasr_angi + SUSE: "cibadmin --query --xpath \"//primitive[@type='SAPHanaController']\" --node-path | grep -oP \"primitive\\[@id='\\K[^']+\"" + - name: resource_migration_cmd SUSE: "crm resource move {{ hana_resource_name | default('msl_SAPHana_' ~ (db_sid | upper) ~ '_HDB' ~ db_instance_number) }} {{ cluster_status_pre.secondary_node | default('') }} force" REDHAT: "pcs resource move {{ hana_resource_name | default('SAPHana_' ~ (db_sid | upper) ~ '_' ~ db_instance_number ~ '-clone') }} --master" diff --git a/tests/module_utils/filter_tests_test.py b/tests/module_utils/filter_tests_test.py new file mode 100644 index 00000000..6556cd56 --- /dev/null +++ b/tests/module_utils/filter_tests_test.py @@ -0,0 +1,424 @@ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. + +""" +Unit tests for the filter_tests module. +""" + +import json +import tempfile +import os +import pytest +import yaml +from src.module_utils.filter_tests import TestFilter + + +class TestTestFilter: + """ + Test class for the TestFilter class. + """ + + @pytest.fixture + def sample_config(self): + """ + Fixture providing sample test configuration data. 
+ + :return: Sample configuration dictionary + :rtype: dict + """ + return { + "sap_functional_test_type_map": [ + {"name": "DatabaseHighAvailability", "value": "HA_DB"}, + {"name": "CentralServicesHighAvailability", "value": "HA_SCS"}, + ], + "test_groups": [ + { + "name": "HA_DB_HANA", + "test_cases": [ + { + "name": "HA Parameters Validation", + "task_name": "ha-config", + "description": "Validates HA configuration", + "enabled": True, + }, + { + "name": "Azure Load Balancer Validation", + "task_name": "azure-lb", + "description": "Validates Azure LB setup", + "enabled": True, + }, + { + "name": "Primary Node Crash", + "task_name": "primary-node-crash", + "description": "Simulates primary node crash", + "enabled": True, + }, + ], + }, + { + "name": "HA_SCS", + "test_cases": [ + { + "name": "SAPControl Config Validation", + "task_name": "sapcontrol-config", + "description": "Validates SAPControl config", + "enabled": True, + }, + { + "name": "ASCS Node Crash", + "task_name": "ascs-node-crash", + "description": "Simulates ASCS node crash", + "enabled": True, + }, + ], + }, + ], + "sap_sid": "HDB", + "db_sid": "HDB", + "default_retries": 50, + } + + @pytest.fixture + def temp_yaml_file(self, sample_config): + """ + Fixture providing a temporary YAML file with sample configuration. + + :param sample_config: Sample configuration data + :type sample_config: dict + :return: Path to temporary YAML file + :rtype: str + """ + with tempfile.NamedTemporaryFile(mode="w", suffix=".yaml", delete=False) as f: + yaml.dump(sample_config, f) + return f.name + + def test_init_with_valid_file(self, temp_yaml_file, sample_config): + """ + Test initialization with a valid YAML file. 
+ + :param temp_yaml_file: Path to temporary YAML file + :type temp_yaml_file: str + :param sample_config: Expected configuration data + :type sample_config: dict + """ + try: + filter_obj = TestFilter(temp_yaml_file) + assert filter_obj.input_file == temp_yaml_file + assert filter_obj.config == sample_config + finally: + os.unlink(temp_yaml_file) + + def test_init_with_nonexistent_file(self, capsys): + """ + Test initialization with a non-existent file. + + :param capsys: Pytest fixture to capture stdout/stderr + :type capsys: pytest.CaptureFixture + """ + with pytest.raises(SystemExit) as exc_info: + TestFilter("nonexistent_file.yaml") + assert exc_info.value.code == 1 + captured = capsys.readouterr() + assert "Error: Configuration file nonexistent_file.yaml not found" in captured.err + + def test_init_with_invalid_yaml(self, capsys): + """ + Test initialization with an invalid YAML file. + + :param capsys: Pytest fixture to capture stdout/stderr + :type capsys: pytest.CaptureFixture + """ + with tempfile.NamedTemporaryFile(mode="w", suffix=".yaml", delete=False) as f: + f.write("invalid: yaml: content: [unclosed") + temp_file = f.name + try: + with pytest.raises(SystemExit) as exc_info: + TestFilter(temp_file) + assert exc_info.value.code == 1 + captured = capsys.readouterr() + assert f"Error parsing YAML file {temp_file}" in captured.err + finally: + os.unlink(temp_file) + + def test_filter_tests_no_filters(self, temp_yaml_file, sample_config): + """ + Test filter_tests with no filters applied. 
+ + :param temp_yaml_file: Path to temporary YAML file + :type temp_yaml_file: str + :param sample_config: Expected configuration data + :type sample_config: dict + """ + try: + filter_obj = TestFilter(temp_yaml_file) + result = filter_obj.filter_tests() + result_dict = json.loads(result) + assert result_dict == sample_config + finally: + os.unlink(temp_yaml_file) + + def test_filter_tests_by_group(self, temp_yaml_file): + """ + Test filter_tests with a specific test group. + + :param temp_yaml_file: Path to temporary YAML file + :type temp_yaml_file: str + """ + try: + filter_obj = TestFilter(temp_yaml_file) + result = filter_obj.filter_tests(test_group="HA_DB_HANA") + result_dict = json.loads(result) + ha_db_group = next(g for g in result_dict["test_groups"] if g["name"] == "HA_DB_HANA") + for test_case in ha_db_group["test_cases"]: + assert test_case["enabled"] is True + ha_scs_group = next(g for g in result_dict["test_groups"] if g["name"] == "HA_SCS") + for test_case in ha_scs_group["test_cases"]: + assert test_case["enabled"] is False + finally: + os.unlink(temp_yaml_file) + + def test_filter_tests_by_cases(self, temp_yaml_file): + """ + Test filter_tests with specific test cases. + + :param temp_yaml_file: Path to temporary YAML file + :type temp_yaml_file: str + """ + try: + filter_obj = TestFilter(temp_yaml_file) + result = filter_obj.filter_tests(test_cases=["ha-config", "ascs-node-crash"]) + result_dict = json.loads(result) + for group in result_dict["test_groups"]: + for test_case in group["test_cases"]: + if test_case["task_name"] in ["ha-config", "ascs-node-crash"]: + assert test_case["enabled"] is True + else: + assert test_case["enabled"] is False + finally: + os.unlink(temp_yaml_file) + + def test_filter_tests_by_group_and_cases(self, temp_yaml_file): + """ + Test filter_tests with both test group and specific test cases. 
+ + :param temp_yaml_file: Path to temporary YAML file + :type temp_yaml_file: str + """ + try: + filter_obj = TestFilter(temp_yaml_file) + result = filter_obj.filter_tests( + test_group="HA_DB_HANA", test_cases=["ha-config", "azure-lb"] + ) + result_dict = json.loads(result) + ha_db_group = next(g for g in result_dict["test_groups"] if g["name"] == "HA_DB_HANA") + assert len(ha_db_group["test_cases"]) == 2 + expected_tasks = {"ha-config", "azure-lb"} + actual_tasks = {tc["task_name"] for tc in ha_db_group["test_cases"]} + assert actual_tasks == expected_tasks + for test_case in ha_db_group["test_cases"]: + assert test_case["enabled"] is True + finally: + os.unlink(temp_yaml_file) + + def test_get_ansible_vars_no_filters(self, temp_yaml_file, sample_config): + """ + Test get_ansible_vars with no filters applied. + + :param temp_yaml_file: Path to temporary YAML file + :type temp_yaml_file: str + :param sample_config: Expected configuration data + :type sample_config: dict + """ + try: + filter_obj = TestFilter(temp_yaml_file) + result = filter_obj.get_ansible_vars() + result_dict = json.loads(result) + assert "test_groups" in result_dict + assert result_dict["test_groups"] == sample_config["test_groups"] + finally: + os.unlink(temp_yaml_file) + + def test_get_ansible_vars_with_filters(self, temp_yaml_file): + """ + Test get_ansible_vars with filters applied. 
+ + :param temp_yaml_file: Path to temporary YAML file + :type temp_yaml_file: str + """ + try: + filter_obj = TestFilter(temp_yaml_file) + result = filter_obj.get_ansible_vars(test_group="HA_SCS") + result_dict = json.loads(result) + assert "test_groups" in result_dict + ha_scs_group = next(g for g in result_dict["test_groups"] if g["name"] == "HA_SCS") + for test_case in ha_scs_group["test_cases"]: + assert test_case["enabled"] is True + ha_db_group = next(g for g in result_dict["test_groups"] if g["name"] == "HA_DB_HANA") + for test_case in ha_db_group["test_cases"]: + assert test_case["enabled"] is False + finally: + os.unlink(temp_yaml_file) + + def test_main_function_insufficient_args(self, monkeypatch, capsys): + """ + Test main function with insufficient arguments. + + :param monkeypatch: Pytest monkeypatch fixture + :type monkeypatch: pytest.MonkeyPatch + :param capsys: Pytest fixture to capture stdout/stderr + :type capsys: pytest.CaptureFixture + """ + with monkeypatch.context() as m: + m.setattr("sys.argv", ["filter_tests.py"]) + with pytest.raises(SystemExit) as exc_info: + from src.module_utils.filter_tests import main + + main() + assert exc_info.value.code == 1 + captured = capsys.readouterr() + assert "Usage: python filter_tests.py" in captured.err + + def test_main_function_with_input_file_only(self, monkeypatch, temp_yaml_file, capsys): + """ + Test main function with only input file argument. 
+ + :param monkeypatch: Pytest monkeypatch fixture + :type monkeypatch: pytest.MonkeyPatch + :param temp_yaml_file: Path to temporary YAML file + :type temp_yaml_file: str + :param capsys: Pytest fixture to capture stdout/stderr + :type capsys: pytest.CaptureFixture + """ + try: + with monkeypatch.context() as m: + m.setattr("sys.argv", ["filter_tests.py", temp_yaml_file]) + from src.module_utils.filter_tests import main + + main() + captured = capsys.readouterr() + result = json.loads(captured.out) + assert "test_groups" in result + finally: + os.unlink(temp_yaml_file) + + def test_main_function_with_test_group(self, monkeypatch, temp_yaml_file, capsys): + """ + Test main function with test group specified. + + :param monkeypatch: Pytest monkeypatch fixture + :type monkeypatch: pytest.MonkeyPatch + :param temp_yaml_file: Path to temporary YAML file + :type temp_yaml_file: str + :param capsys: Pytest fixture to capture stdout/stderr + :type capsys: pytest.CaptureFixture + """ + try: + with monkeypatch.context() as m: + m.setattr("sys.argv", ["filter_tests.py", temp_yaml_file, "HA_DB_HANA"]) + from src.module_utils.filter_tests import main + + main() + captured = capsys.readouterr() + result = json.loads(captured.out) + assert "test_groups" in result + finally: + os.unlink(temp_yaml_file) + + def test_main_function_with_test_cases(self, monkeypatch, temp_yaml_file, capsys): + """ + Test main function with test cases specified. 
+ + :param monkeypatch: Pytest monkeypatch fixture + :type monkeypatch: pytest.MonkeyPatch + :param temp_yaml_file: Path to temporary YAML file + :type temp_yaml_file: str + :param capsys: Pytest fixture to capture stdout/stderr + :type capsys: pytest.CaptureFixture + """ + try: + with monkeypatch.context() as m: + m.setattr( + "sys.argv", ["filter_tests.py", temp_yaml_file, "null", "ha-config,azure-lb"] + ) + from src.module_utils.filter_tests import main + + main() + captured = capsys.readouterr() + result = json.loads(captured.out) + assert "test_groups" in result + finally: + os.unlink(temp_yaml_file) + + def test_main_function_with_null_values(self, monkeypatch, temp_yaml_file, capsys): + """ + Test main function with null values. + + :param monkeypatch: Pytest monkeypatch fixture + :type monkeypatch: pytest.MonkeyPatch + :param temp_yaml_file: Path to temporary YAML file + :type temp_yaml_file: str + :param capsys: Pytest fixture to capture stdout/stderr + :type capsys: pytest.CaptureFixture + """ + try: + with monkeypatch.context() as m: + m.setattr("sys.argv", ["filter_tests.py", temp_yaml_file, "null", "null"]) + from src.module_utils.filter_tests import main + + main() + captured = capsys.readouterr() + result = json.loads(captured.out) + assert "test_groups" in result + finally: + os.unlink(temp_yaml_file) + + def test_filter_tests_nonexistent_group(self, temp_yaml_file, sample_config): + """ + Test filter_tests with a non-existent test group. 
+ + :param temp_yaml_file: Path to temporary YAML file + :type temp_yaml_file: str + :param sample_config: Sample configuration data + :type sample_config: dict + """ + try: + filter_obj = TestFilter(temp_yaml_file) + result = filter_obj.filter_tests(test_group="NONEXISTENT_GROUP") + result_dict = json.loads(result) + for group in result_dict["test_groups"]: + for test_case in group["test_cases"]: + assert test_case["enabled"] is False + finally: + os.unlink(temp_yaml_file) + + def test_filter_tests_nonexistent_cases(self, temp_yaml_file): + """ + Test filter_tests with non-existent test cases. + + :param temp_yaml_file: Path to temporary YAML file + :type temp_yaml_file: str + """ + try: + filter_obj = TestFilter(temp_yaml_file) + result = filter_obj.filter_tests(test_cases=["nonexistent-case"]) + result_dict = json.loads(result) + for group in result_dict["test_groups"]: + for test_case in group["test_cases"]: + assert test_case["enabled"] is False + finally: + os.unlink(temp_yaml_file) + + def test_config_copy_independence(self, temp_yaml_file): + """ + Test that filtered configuration doesn't modify the original. + + :param temp_yaml_file: Path to temporary YAML file + :type temp_yaml_file: str + """ + try: + filter_obj = TestFilter(temp_yaml_file) + original_config = filter_obj.config.copy() + filter_obj.filter_tests(test_group="HA_DB_HANA") + assert filter_obj.config == original_config + finally: + os.unlink(temp_yaml_file) diff --git a/tests/module_utils/get_cluster_status_test.py b/tests/module_utils/get_cluster_status_test.py index 98c5eb6e..e601ddfa 100644 --- a/tests/module_utils/get_cluster_status_test.py +++ b/tests/module_utils/get_cluster_status_test.py @@ -5,10 +5,12 @@ Unit tests for the get_cluster_status module. 
""" +import logging import xml.etree.ElementTree as ET from typing import Dict, Any import pytest from src.module_utils.get_cluster_status import BaseClusterStatusChecker +from src.module_utils.enums import OperatingSystemFamily class TestableBaseClusterChecker(BaseClusterStatusChecker): @@ -21,12 +23,12 @@ def __init__(self, ansible_os_family=""): self.test_ready = False self.test_stable = False - def _process_node_attributes(self, node_attributes: ET.Element) -> Dict[str, Any]: + def _process_node_attributes(self, cluster_status_xml: ET.Element) -> Dict[str, Any]: """ Process node attributes and return a dictionary with node information. - :param node_attributes: XML element containing node attributes. - :type node_attributes: ET.Element + :param cluster_status_xml: XML element containing cluster status. + :type cluster_status_xml: ET.Element :return: Dictionary with node information. :rtype: Dict[str, Any] """ @@ -64,9 +66,9 @@ def base_checker(self): :return: Instance of TestableBaseClusterChecker. :rtype: TestableBaseClusterChecker """ - return TestableBaseClusterChecker(ansible_os_family="REDHAT") + return TestableBaseClusterChecker(ansible_os_family=OperatingSystemFamily.REDHAT) - def test_get_stonith_action_rhel94(self, mocker, base_checker): + def test_get_stonith_action_rhel94(self, mocker, base_checker: TestableBaseClusterChecker): """ Test the _get_stonith_action method when the command executes successfully. @@ -88,7 +90,7 @@ def test_get_stonith_action_rhel94(self, mocker, base_checker): mock_execute.assert_called_once() assert base_checker.result["stonith_action"] == return_value - def test_get_stonith_action(self, mocker, base_checker): + def test_get_stonith_action(self, mocker, base_checker: TestableBaseClusterChecker): """ Test the _get_stonith_action method when the command executes successfully. 
@@ -110,7 +112,7 @@ def test_get_stonith_action(self, mocker, base_checker): mock_execute.assert_called_once() assert base_checker.result["stonith_action"] == return_value - def test_get_stonith_action_exception(self, mocker, base_checker): + def test_get_stonith_action_exception(self, mocker, base_checker: TestableBaseClusterChecker): """ Test the _get_stonith_action method when the command raises an exception. @@ -128,7 +130,9 @@ def test_get_stonith_action_exception(self, mocker, base_checker): mock_execute.assert_called_once() assert base_checker.result["stonith_action"] == "unknown" - def test_validate_cluster_basic_status_success(self, mocker, base_checker): + def test_validate_cluster_basic_status_success( + self, mocker, base_checker: TestableBaseClusterChecker + ): """ Test _validate_cluster_basic_status method with a successful cluster status. @@ -156,7 +160,9 @@ def test_validate_cluster_basic_status_success(self, mocker, base_checker): assert base_checker.result["pacemaker_status"] == "running" - def test_validate_cluster_basic_status_insufficient_nodes(self, mocker, base_checker): + def test_validate_cluster_basic_status_insufficient_nodes( + self, mocker, base_checker: TestableBaseClusterChecker + ): """ Test _validate_cluster_basic_status method with insufficient nodes. @@ -183,7 +189,9 @@ def test_validate_cluster_basic_status_insufficient_nodes(self, mocker, base_che assert "insufficient nodes" in base_checker.result["message"] - def test_validate_cluster_basic_status_offline_node(self, base_checker): + def test_validate_cluster_basic_status_offline_node( + self, base_checker: TestableBaseClusterChecker + ): """ Test _validate_cluster_basic_status method with an offline node. 
@@ -208,7 +216,7 @@ def test_validate_cluster_basic_status_offline_node(self, base_checker): assert "node2 is not online" in base_checker.result["message"] - def test_run_cluster_ready(self, mocker, base_checker): + def test_run_cluster_ready(self, mocker, base_checker: TestableBaseClusterChecker): """ Test the run method when the cluster is ready. @@ -245,3 +253,152 @@ def test_run_cluster_ready(self, mocker, base_checker): assert result["status"] == "PASSED" assert "end" in result + + def test_run_cluster_unstable(self, mocker, base_checker: TestableBaseClusterChecker): + """ + Test the run method when cluster is ready but not stable. + + :param mocker: Mocking library to patch methods. + :type mocker: mocker.MockerFixture + :param base_checker: Instance of TestableBaseClusterChecker. + :type base_checker: TestableBaseClusterChecker + """ + mocker.patch.object(base_checker, "execute_command_subprocess", return_value="reboot") + + base_checker.test_ready = True + base_checker.test_stable = False # Cluster is not stable + + result = base_checker.run() + + assert result["status"] == "PASSED" + assert "Pacemaker cluster isn't stable" in result["message"] + + def test_run_cluster_not_ready_initially( + self, mocker, base_checker: TestableBaseClusterChecker + ): + """ + Test the run method when cluster is not ready initially but becomes ready. + + :param mocker: Mocking library to patch methods. + :type mocker: mocker.MockerFixture + :param base_checker: Instance of TestableBaseClusterChecker. 
+ :type base_checker: TestableBaseClusterChecker + """ + mock_execute = mocker.patch.object(base_checker, "execute_command_subprocess") + mock_execute.side_effect = [ + "reboot", + """ + + + + + + + + + + + + + """, + "active", + ] + + base_checker.test_ready = False + base_checker.test_stable = True + base_checker.max_ready_calls = 2 + + result = base_checker.run() + + assert result["status"] == "PASSED" + assert "end" in result + + def test_run_cluster_ready_immediately(self, mocker, base_checker: TestableBaseClusterChecker): + """ + Test the run method when the cluster is ready immediately. + + :param mocker: Mocking library to patch methods. + :type mocker: mocker.MockerFixture + :param base_checker: Instance of TestableBaseClusterChecker. + :type base_checker: TestableBaseClusterChecker + """ + mock_execute = mocker.patch.object( + base_checker, "execute_command_subprocess", return_value="reboot" + ) + + base_checker.test_ready = True + base_checker.test_stable = True + + result = base_checker.run() + + assert result["status"] == "PASSED" + assert "end" in result + assert mock_execute.call_count == 1 + + def test_run_method_exception_in_try_block( + self, mocker, base_checker: TestableBaseClusterChecker + ): + """ + Test run method when exception occurs in try block. + + :param mocker: Mocking library to patch methods. + :type mocker: mocker.MockerFixture + :param base_checker: Instance of TestableBaseClusterChecker. 
+ :type base_checker: TestableBaseClusterChecker + """ + mocker.patch.object( + base_checker, "execute_command_subprocess", side_effect=Exception("Test exception") + ) + mock_handle_error = mocker.patch.object(base_checker, "handle_error") + mock_log = mocker.patch.object(base_checker, "log") + + result = base_checker.run() + mock_handle_error.assert_called_once() + + mock_log.assert_any_call(logging.INFO, "Starting cluster status check") + mock_log.assert_any_call(logging.INFO, "Cluster status check completed") + assert result["status"] == "PASSED" + assert "end" in result + + def test_run_method_while_loop_multiple_iterations( + self, mocker, base_checker: TestableBaseClusterChecker + ): + """ + Test run method with multiple while loop iterations. + + :param mocker: Mocking library to patch methods. + :type mocker: mocker.MockerFixture + :param base_checker: Instance of TestableBaseClusterChecker. + :type base_checker: TestableBaseClusterChecker + """ + cluster_xml = """ + + + + + + + + + + + + + """ + + mock_execute = mocker.patch.object(base_checker, "execute_command_subprocess") + mock_execute.side_effect = [ + "reboot", + cluster_xml, + "active", + cluster_xml, + "active", + ] + + base_checker.test_ready = False + base_checker.max_ready_calls = 3 + base_checker.test_stable = True + + result = base_checker.run() + + assert result["status"] == "PASSED" diff --git a/tests/module_utils/sap_automation_qa_test.py b/tests/module_utils/sap_automation_qa_test.py index f1f61d88..3672fcfe 100644 --- a/tests/module_utils/sap_automation_qa_test.py +++ b/tests/module_utils/sap_automation_qa_test.py @@ -6,7 +6,8 @@ """ import xml.etree.ElementTree as ET -from src.module_utils.sap_automation_qa import SapAutomationQA, TestStatus +from src.module_utils.sap_automation_qa import SapAutomationQA +from src.module_utils.enums import TestStatus class MockLogger: @@ -66,7 +67,6 @@ def test_init(self): Test the initialization of the SapAutomationQA class. 
""" sap_qa = SapAutomationQA() - assert sap_qa.result["status"] == TestStatus.NOT_STARTED.value assert sap_qa.result["message"] == "" assert not sap_qa.result["details"] assert not sap_qa.result["logs"] diff --git a/tests/modules/check_indexserver_test.py b/tests/modules/check_indexserver_test.py index 290e4ffd..a3863022 100644 --- a/tests/modules/check_indexserver_test.py +++ b/tests/modules/check_indexserver_test.py @@ -7,7 +7,7 @@ import io from src.modules.check_indexserver import IndexServerCheck, main -from src.module_utils.sap_automation_qa import TestStatus +from src.module_utils.enums import OperatingSystemFamily, TestStatus def fake_open_factory(file_content): @@ -54,7 +54,9 @@ def test_redhat_indexserver_success(self, monkeypatch): ] with monkeypatch.context() as monkey_patch: monkey_patch.setattr("builtins.open", fake_open_factory(file_lines)) - checker = IndexServerCheck(database_sid="TEST", os_distribution="redhat") + checker = IndexServerCheck( + database_sid="TEST", os_distribution=OperatingSystemFamily.REDHAT + ) checker.check_indexserver() result = checker.get_result() @@ -77,9 +79,17 @@ def test_suse_indexserver_success(self, monkeypatch): "path=/usr/share/SAPHanaSR", "dummy=dummy", ] + file_lines_angi = [ + "[ha_dr_provider_suschksrv]", + "provider=susChkSrv", + "path=/usr/share/SAPHanaSR", + "dummy=dummy", + ] with monkeypatch.context() as monkey_patch: monkey_patch.setattr("builtins.open", fake_open_factory(file_lines)) - checker = IndexServerCheck(database_sid="TEST", os_distribution="suse") + checker = IndexServerCheck( + database_sid="TEST", os_distribution=OperatingSystemFamily.SUSE + ) checker.check_indexserver() result = checker.get_result() @@ -89,12 +99,17 @@ def test_suse_indexserver_success(self, monkeypatch): assert "provider" in result["details"] assert "path" in result["details"] + monkey_patch.setattr("builtins.open", fake_open_factory(file_lines_angi)) + checker.check_indexserver() + result = checker.get_result() + assert 
result["status"] == TestStatus.SUCCESS.value + def test_unsupported_os(self): """ Test unsupported OS distribution. """ with io.StringIO() as _: - checker = IndexServerCheck(database_sid="TEST", os_distribution="windows") + checker = IndexServerCheck(database_sid="TEST", os_distribution="unsupported_os") checker.check_indexserver() result = checker.get_result() @@ -117,7 +132,9 @@ def test_indexserver_not_configured(self, monkeypatch): ] with monkeypatch.context() as monkey_patch: monkey_patch.setattr("builtins.open", fake_open_factory(file_lines)) - index_server_check = IndexServerCheck(database_sid="HDB", os_distribution="redhat") + index_server_check = IndexServerCheck( + database_sid="HDB", os_distribution=OperatingSystemFamily.REDHAT + ) index_server_check.check_indexserver() result = index_server_check.get_result() @@ -143,7 +160,9 @@ def fake_open(*args, **kwargs): with monkeypatch.context() as monkey_patch: monkey_patch.setattr("builtins.open", fake_open) - index_server_check = IndexServerCheck(database_sid="HDB", os_distribution="redhat") + index_server_check = IndexServerCheck( + database_sid="HDB", os_distribution=OperatingSystemFamily.REDHAT + ) index_server_check.check_indexserver() result = index_server_check.get_result() @@ -174,7 +193,6 @@ class MockAnsibleModule: def __init__(self, *args, **kwargs): self.params = { "database_sid": "TEST", - "ansible_os_family": "redhat", } def exit_json(self, **kwargs): @@ -184,8 +202,22 @@ def exit_json(self, **kwargs): nonlocal mock_result mock_result = kwargs + def mock_ansible_facts_suse(module): + """ + Mock function to return Ansible facts for Suse. + + :param module: Mock Ansible module instance. + :type module: MockAnsibleModule + :return: Dictionary with Suse facts. 
+ :rtype: dict + """ + return {"os_family": "Suse", "distribution": "SLES", "ansible_os_family": "Suse"} + with monkeypatch.context() as monkey_patch: monkey_patch.setattr("src.modules.check_indexserver.AnsibleModule", MockAnsibleModule) monkey_patch.setattr("builtins.open", fake_open_factory(file_lines)) + monkey_patch.setattr( + "src.modules.check_indexserver.ansible_facts", mock_ansible_facts_suse + ) main() assert mock_result["status"] == TestStatus.ERROR.value diff --git a/tests/modules/get_cluster_status_db_test.py b/tests/modules/get_cluster_status_db_test.py index 42f36490..b689a109 100644 --- a/tests/modules/get_cluster_status_db_test.py +++ b/tests/modules/get_cluster_status_db_test.py @@ -7,7 +7,11 @@ import xml.etree.ElementTree as ET import pytest -from src.modules.get_cluster_status_db import HanaClusterStatusChecker, run_module +from src.modules.get_cluster_status_db import ( + HanaClusterStatusChecker, + run_module, +) +from src.module_utils.enums import OperatingSystemFamily, HanaSRProvider class TestHanaClusterStatusChecker: @@ -16,58 +20,76 @@ class TestHanaClusterStatusChecker: """ @pytest.fixture - def hana_checker(self): + def hana_checker_classic(self): """ - Fixture for creating a HanaClusterStatusChecker instance. + Fixture for creating a HanaClusterStatusChecker instance with classic SAP HANA SR provider. :return: Instance of HanaClusterStatusChecker. :rtype: HanaClusterStatusChecker """ - return HanaClusterStatusChecker(database_sid="TEST", ansible_os_family="REDHAT") + return HanaClusterStatusChecker( + database_sid="TEST", + ansible_os_family=OperatingSystemFamily.REDHAT, + saphanasr_provider=HanaSRProvider.SAPHANASR, + ) - def test_get_automation_register(self, mocker, hana_checker): + @pytest.fixture + def hana_checker_angi(self): + """ + Fixture for creating a HanaClusterStatusChecker instance with ANGI SAP HANA SR provider. + + :return: Instance of HanaClusterStatusChecker. 
+ :rtype: HanaClusterStatusChecker + """ + return HanaClusterStatusChecker( + database_sid="TEST", + ansible_os_family=OperatingSystemFamily.SUSE, + saphanasr_provider=HanaSRProvider.ANGI, + ) + + def test_get_automation_register(self, mocker, hana_checker_classic): """ Test the _get_automation_register method. :param mocker: Mocking library for Python. :type mocker: _mocker.MagicMock - :param hana_checker: Instance of HanaClusterStatusChecker. - :type hana_checker: HanaClusterStatusChecker + :param hana_checker_classic: Instance of HanaClusterStatusChecker. + :type hana_checker_classic: HanaClusterStatusChecker """ mocker.patch.object( - hana_checker, + hana_checker_classic, "execute_command_subprocess", return_value='', ) - hana_checker._get_automation_register() + hana_checker_classic._get_automation_register() - assert hana_checker.result["AUTOMATED_REGISTER"] == "true" + assert hana_checker_classic.result["AUTOMATED_REGISTER"] == "true" - def test_get_automation_register_exception(self, mocker, hana_checker): + def test_get_automation_register_exception(self, mocker, hana_checker_classic): """ Test the _get_automation_register method when an exception occurs. :param mocker: Mocking library for Python. :type mocker: _mocker.MagicMock - :param hana_checker: Instance of HanaClusterStatusChecker. - :type hana_checker: HanaClusterStatusChecker + :param hana_checker_classic: Instance of HanaClusterStatusChecker. 
+ :type hana_checker_classic: HanaClusterStatusChecker """ mocker.patch.object( - hana_checker, "execute_command_subprocess", side_effect=Exception("Test error") + hana_checker_classic, "execute_command_subprocess", side_effect=Exception("Test error") ) - hana_checker._get_automation_register() + hana_checker_classic._get_automation_register() - assert hana_checker.result["AUTOMATED_REGISTER"] == "unknown" + assert hana_checker_classic.result["AUTOMATED_REGISTER"] == "unknown" - def test_process_node_attributes_primary_only(self, hana_checker): + def test_process_node_attributes_primary_only(self, hana_checker_classic): """ Test processing node attributes with only the primary node. - :param hana_checker: Instance of HanaClusterStatusChecker. - :type hana_checker: HanaClusterStatusChecker + :param hana_checker_classic: Instance of HanaClusterStatusChecker. + :type hana_checker_classic: HanaClusterStatusChecker """ xml_str = """ @@ -84,7 +106,7 @@ def test_process_node_attributes_primary_only(self, hana_checker): """ - result = hana_checker._process_node_attributes(ET.fromstring(xml_str)) + result = hana_checker_classic._process_node_attributes(ET.fromstring(xml_str)) assert result["primary_node"] == "node1" assert result["secondary_node"] == "" @@ -92,12 +114,73 @@ def test_process_node_attributes_primary_only(self, hana_checker): assert result["replication_mode"] == "syncmem" assert result["primary_site_name"] == "site1" - def test_process_node_attributes_both_nodes(self, hana_checker): + def test_process_node_attributes_primary_only_angi(self, hana_checker_angi): + """ + Test processing node attributes with only the primary node when using ANGI provider. + + :param hana_checker_angi: Instance of HanaClusterStatusChecker. 
+ :type hana_checker_angi: HanaClusterStatusChecker + """ + + xml_str = """ + + + + + + + + + + + + """ + + result = hana_checker_angi._process_node_attributes(ET.fromstring(xml_str)) + + assert result["primary_node"] == "node1" + assert result["secondary_node"] == "" + assert result["primary_site_name"] == "SITEA" + + def test_process_node_attributes_both_nodes_angi(self, hana_checker_angi): """ Test processing node attributes with both primary and secondary nodes. - :param hana_checker: Instance of HanaClusterStatusChecker. - :type hana_checker: HanaClusterStatusChecker + :param hana_checker_angi: Instance of HanaClusterStatusChecker. + :type hana_checker_angi: HanaClusterStatusChecker + """ + xml_str = """ + + + + + + + + + + + + + + + + + + + """ + result = hana_checker_angi._process_node_attributes(ET.fromstring(xml_str)) + + assert result["primary_node"] == "node1" + assert result["secondary_node"] == "node2" + assert result["primary_site_name"] == "SITEA" + + def test_process_node_attributes_both_nodes(self, hana_checker_classic): + """ + Test processing node attributes with both primary and secondary nodes. + + :param hana_checker_classic: Instance of HanaClusterStatusChecker. + :type hana_checker_classic: HanaClusterStatusChecker """ xml_str = """ @@ -117,7 +200,7 @@ def test_process_node_attributes_both_nodes(self, hana_checker): """ - result = hana_checker._process_node_attributes(ET.fromstring(xml_str)) + result = hana_checker_classic._process_node_attributes(ET.fromstring(xml_str)) assert result["primary_node"] == "node1" assert result["secondary_node"] == "node2" @@ -125,54 +208,54 @@ def test_process_node_attributes_both_nodes(self, hana_checker): assert result["replication_mode"] == "syncmem" assert result["primary_site_name"] == "site1" - def test_is_cluster_ready(self, hana_checker): + def test_is_cluster_ready(self, hana_checker_classic): """ Test the _is_cluster_ready method. - :param hana_checker: Instance of HanaClusterStatusChecker. 
- :type hana_checker: HanaClusterStatusChecker + :param hana_checker_classic: Instance of HanaClusterStatusChecker. + :type hana_checker_classic: HanaClusterStatusChecker """ - hana_checker.result["primary_node"] = "" - assert not hana_checker._is_cluster_ready() + hana_checker_classic.result["primary_node"] = "" + assert not hana_checker_classic._is_cluster_ready() - hana_checker.result["primary_node"] = "node1" - assert hana_checker._is_cluster_ready() + hana_checker_classic.result["primary_node"] = "node1" + assert hana_checker_classic._is_cluster_ready() - def test_is_cluster_stable(self, hana_checker): + def test_is_cluster_stable(self, hana_checker_classic): """ Test the _is_cluster_stable method. - :param hana_checker: Instance of HanaClusterStatusChecker. - :type hana_checker: HanaClusterStatusChecker + :param hana_checker_classic: Instance of HanaClusterStatusChecker. + :type hana_checker_classic: HanaClusterStatusChecker """ - hana_checker.result["primary_node"] = "" - hana_checker.result["secondary_node"] = "" - assert not hana_checker._is_cluster_stable() + hana_checker_classic.result["primary_node"] = "" + hana_checker_classic.result["secondary_node"] = "" + assert not hana_checker_classic._is_cluster_stable() - hana_checker.result["primary_node"] = "node1" - hana_checker.result["secondary_node"] = "" - assert not hana_checker._is_cluster_stable() + hana_checker_classic.result["primary_node"] = "node1" + hana_checker_classic.result["secondary_node"] = "" + assert not hana_checker_classic._is_cluster_stable() - hana_checker.result["primary_node"] = "node1" - hana_checker.result["secondary_node"] = "node2" - assert hana_checker._is_cluster_stable() + hana_checker_classic.result["primary_node"] = "node1" + hana_checker_classic.result["secondary_node"] = "node2" + assert hana_checker_classic._is_cluster_stable() - def test_run(self, mocker, hana_checker): + def test_run(self, mocker, hana_checker_classic): """ Test the run method of the 
HanaClusterStatusChecker class. :param mocker: Mocking library for Python. :type mocker: _mocker.MagicMock - :param hana_checker: Instance of HanaClusterStatusChecker. - :type hana_checker: HanaClusterStatusChecker + :param hana_checker_classic: Instance of HanaClusterStatusChecker. + :type hana_checker_classic: HanaClusterStatusChecker """ mock_super_run = mocker.patch( "src.module_utils.get_cluster_status.BaseClusterStatusChecker.run", return_value={"status": "PASSED"}, ) - mock_get_automation = mocker.patch.object(hana_checker, "_get_automation_register") + mock_get_automation = mocker.patch.object(hana_checker_classic, "_get_automation_register") - result = hana_checker.run() + result = hana_checker_classic.run() mock_super_run.assert_called_once() mock_get_automation.assert_called_once() @@ -194,9 +277,13 @@ def test_run_module(self, mocker): mock_ansible_module = mocker.MagicMock() mock_ansible_module.params = { "database_sid": "TEST", - "ansible_os_family": "REDHAT", "operation_step": "check", + "saphanasr_provider": "SAPHanaSR", } + mocker.patch( + "src.modules.get_cluster_status_db.ansible_facts", return_value={"os_family": "REDHAT"} + ) + mocker.patch( "src.modules.get_cluster_status_db.AnsibleModule", return_value=mock_ansible_module ) diff --git a/tests/modules/get_cluster_status_scs_test.py b/tests/modules/get_cluster_status_scs_test.py index 1fe334d3..e72bf527 100644 --- a/tests/modules/get_cluster_status_scs_test.py +++ b/tests/modules/get_cluster_status_scs_test.py @@ -186,6 +186,9 @@ def test_run_module(self, mocker): mocker.patch( "src.modules.get_cluster_status_scs.AnsibleModule", return_value=mock_ansible_module ) + mocker.patch( + "src.modules.get_cluster_status_scs.ansible_facts", return_value={"os_family": "REDHAT"} + ) mock_run = mocker.MagicMock() mock_checker = mocker.MagicMock() diff --git a/tests/modules/get_pcmk_properties_db_test.py b/tests/modules/get_pcmk_properties_db_test.py index 58b11860..9d7d5664 100644 --- 
a/tests/modules/get_pcmk_properties_db_test.py +++ b/tests/modules/get_pcmk_properties_db_test.py @@ -6,8 +6,10 @@ """ import io +import xml.etree.ElementTree as ET import pytest from src.modules.get_pcmk_properties_db import HAClusterValidator, main +from src.module_utils.enums import OperatingSystemFamily, HanaSRProvider, TestStatus DUMMY_XML_RSC = """ @@ -88,8 +90,8 @@ DUMMY_GLOBAL_INI = """[DEFAULT] dumm1 = dummy2 -[ha_dr_provider_SAPHanaSR] -provider = SAPHanaSR +[ha_dr_provider_sushanasr] +provider = SAPHanaSR-angi """ DUMMY_CONSTANTS = { @@ -126,7 +128,7 @@ "OS_PARAMETERS": { "DEFAULTS": {"sysctl": {"kernel.numa_balancing": "kernel.numa_balancing = 0"}} }, - "GLOBAL_INI": {"REDHAT": {"provider": "SAPHanaSR"}}, + "GLOBAL_INI": {"REDHAT": {"provider": "SAPHanaSR"}, "SUSE": {"provider": "SAPHanaSR-angi"}}, "CONSTRAINTS": {"rsc_location": {"score": "INFINITY"}}, } @@ -198,7 +200,7 @@ def mock_execute_command(*args, **kwargs): :return: Mocked command output. :rtype: str """ - command = args[1] if len(args) > 1 else kwargs.get("command") + command = str(args[1]) if len(args) > 1 else str(kwargs.get("command")) if "sysctl" in command: return DUMMY_OS_COMMAND return mock_xml_outputs.get(command[-1], "") @@ -209,15 +211,268 @@ def mock_execute_command(*args, **kwargs): ) monkeypatch.setattr("builtins.open", fake_open_factory(DUMMY_GLOBAL_INI)) return HAClusterValidator( - os_type="REDHAT", + os_type=OperatingSystemFamily.REDHAT, os_version="9.2", sid="PRD", instance_number="00", fencing_mechanism="AFA", virtual_machine_name="vmname", constants=DUMMY_CONSTANTS, + saphanasr_provider=HanaSRProvider.SAPHANASR, ) + @pytest.fixture + def validator_angi(self, monkeypatch, mock_xml_outputs): + """ + Fixture for creating a HAClusterValidator instance. + + :param monkeypatch: Monkeypatch fixture for mocking. + :type monkeypatch: pytest.MonkeyPatch + :param mock_xml_outputs: Mock XML outputs. + :type mock_xml_outputs: dict + :return: HAClusterValidator instance. 
+ :rtype: HAClusterValidator + """ + + def mock_execute_command(*args, **kwargs): + """ + Mock function to replace execute_command_subprocess. + + :param *args: Positional arguments. + :param **kwargs: Keyword arguments. + :return: Mocked command output. + :rtype: str + """ + command = str(args[1]) if len(args) > 1 else str(kwargs.get("command")) + if "sysctl" in command: + return DUMMY_OS_COMMAND + return mock_xml_outputs.get(command[-1], "") + + monkeypatch.setattr( + "src.module_utils.sap_automation_qa.SapAutomationQA.execute_command_subprocess", + mock_execute_command, + ) + monkeypatch.setattr("builtins.open", fake_open_factory(DUMMY_GLOBAL_INI)) + return HAClusterValidator( + os_type=OperatingSystemFamily.SUSE, + os_version="9.2", + sid="PRD", + instance_number="00", + fencing_mechanism="AFA", + virtual_machine_name="vmname", + constants=DUMMY_CONSTANTS, + saphanasr_provider=HanaSRProvider.ANGI, + ) + + def test_get_expected_value_fence_config(self, validator): + """ + Test _get_expected_value method with fence configuration. + """ + validator.fencing_mechanism = "azure-fence-agent" + expected = validator._get_expected_value("crm_config", "priority") + assert expected == "10" + + def test_get_expected_value_os_config(self, validator): + """ + Test _get_expected_value method with OS configuration. + """ + expected = validator._get_expected_value("crm_config", "stonith-enabled") + assert expected == "true" + + def test_get_expected_value_defaults(self, validator): + """ + Test _get_expected_value method with defaults. + """ + expected = validator._get_expected_value("crm_config", "unknown-param") + assert expected is None + + def test_get_resource_expected_value_meta_attributes(self, validator): + """ + Test _get_resource_expected_value method for meta_attributes section. 
+ """ + expected = validator._get_resource_expected_value( + "fence_agent", "meta_attributes", "pcmk_delay_max" + ) + assert expected == "15" + + def test_get_resource_expected_value_operations(self, validator): + """ + Test _get_resource_expected_value method for operations section. + """ + expected = validator._get_resource_expected_value( + "fence_agent", "operations", "timeout", "monitor" + ) + assert expected == ["700", "700s"] + + def test_get_resource_expected_value_unknown_section(self, validator): + """ + Test _get_resource_expected_value method for unknown section. + """ + expected = validator._get_resource_expected_value("fence_agent", "unknown_section", "param") + assert expected is None + + def test_create_parameter_with_empty_value(self, validator): + """ + Test _create_parameter method when value is empty. + """ + param = validator._create_parameter( + category="test_category", name="test_param", value="", expected_value="expected" + ) + assert param["status"] == TestStatus.INFO.value + + def test_create_parameter_with_list_expected_value_success(self, validator): + """ + Test _create_parameter method with list expected value - success case. + """ + param = validator._create_parameter( + category="test_category", + name="test_param", + value="value1", + expected_value=["value1", "value2"], + ) + assert param["status"] == TestStatus.SUCCESS.value + assert param["expected_value"] == "value1" + + def test_create_parameter_with_list_expected_value_error(self, validator): + """ + Test _create_parameter method with list expected value - error case. + """ + param = validator._create_parameter( + category="test_category", + name="test_param", + value="value3", + expected_value=["value1", "value2"], + ) + assert param["status"] == TestStatus.ERROR.value + + def test_create_parameter_with_string_expected_value_success(self, validator): + """ + Test _create_parameter method with string expected value - success case. 
+ """ + param = validator._create_parameter( + category="test_category", + name="test_param", + value="expected_value", + expected_value="expected_value", + ) + assert param["status"] == TestStatus.SUCCESS.value + + def test_create_parameter_with_string_expected_value_error(self, validator): + """ + Test _create_parameter method with string expected value - error case. + """ + param = validator._create_parameter( + category="test_category", + name="test_param", + value="actual_value", + expected_value="expected_value", + ) + assert param["status"] == TestStatus.ERROR.value + + def test_create_parameter_with_invalid_expected_value_type(self, validator): + """ + Test _create_parameter method with invalid expected value type. + """ + param = validator._create_parameter( + category="test_category", + name="test_param", + value="test_value", + expected_value={"invalid": "type"}, + ) + assert param["status"] == TestStatus.ERROR.value + + def test_create_parameter_with_none_expected_value(self, validator): + """ + Test _create_parameter method when expected_value is None. + """ + param = validator._create_parameter( + category="crm_config", name="test_param", value="test_value", expected_value=None + ) + assert param["status"] == TestStatus.INFO.value + + def test_parse_global_ini_parameters_angi_provider(self, validator_angi): + """ + Test _parse_global_ini_parameters method with ANGI provider. + Covers lines 420-447. + """ + result = validator_angi.get_result() + assert "details" in result + assert "parameters" in result["details"] + + def test_parse_basic_config(self, validator): + """ + Test _parse_basic_config method. + Covers lines 462-473. 
+ """ + xml_str = """ + + + """ + params = validator._parse_basic_config( + ET.fromstring(xml_str), "crm_config", "test_subcategory" + ) + assert len(params) == 2 + assert params[0]["category"] == "crm_config_test_subcategory" + assert params[0]["name"] == "test_param" + assert params[0]["value"] == "test_value" + + def test_parse_resource_hana_meta_and_topology_meta(self, validator): + """ + Test _parse_resource method for hana_meta and topology_meta categories. + Covers lines 486-521. + """ + xml_str = """ + + """ + element = ET.fromstring(xml_str) + params = validator._parse_resource(element, "hana_meta") + assert len(params) > 0 + params = validator._parse_resource(element, "topology_meta") + assert len(params) > 0 + + def test_parse_constraints_with_valid_constraints(self, validator_angi): + """ + Test _parse_constraints method with valid constraints. + Covers lines 532-552. + """ + xml_str = """ + + + + + """ + root = ET.fromstring(xml_str) + params = validator_angi._parse_constraints(root) + constraint_params = [p for p in params if p["category"] == "constraints_rsc_location"] + assert len(constraint_params) >= 1 + + def test_parse_ha_cluster_config_redhat_skip_op_defaults(self, monkeypatch): + """ + Test parse_ha_cluster_config method with REDHAT OS skipping op_defaults. + Covers lines 574-607. 
+ """ + + def mock_execute_command(*args, **kwargs): + return "" + + monkeypatch.setattr( + "src.module_utils.sap_automation_qa.SapAutomationQA.execute_command_subprocess", + mock_execute_command, + ) + monkeypatch.setattr("builtins.open", fake_open_factory(DUMMY_GLOBAL_INI)) + validator = HAClusterValidator( + os_type=OperatingSystemFamily.REDHAT, + os_version="9.2", + sid="PRD", + instance_number="00", + fencing_mechanism="AFA", + virtual_machine_name="vmname", + constants=DUMMY_CONSTANTS, + saphanasr_provider=HanaSRProvider.SAPHANASR, + ) + result = validator.get_result() + assert "details" in result + def test_parse_ha_cluster_config_success(self, validator): """ Test the parse_ha_cluster_config method for successful parsing. @@ -246,11 +501,11 @@ def __init__(self, *args, **kwargs): self.params = { "sid": "PRD", "instance_number": "00", - "ansible_os_family": "REDHAT", "virtual_machine_name": "vm_name", "fencing_mechanism": "AFA", "os_version": "9.2", "pcmk_constants": DUMMY_CONSTANTS, + "saphanasr_provider": HanaSRProvider.SAPHANASR.value, } def exit_json(self, **kwargs): diff --git a/tests/modules/get_pcmk_properties_scs_test.py b/tests/modules/get_pcmk_properties_scs_test.py index 4349c5c9..82bbc2df 100644 --- a/tests/modules/get_pcmk_properties_scs_test.py +++ b/tests/modules/get_pcmk_properties_scs_test.py @@ -6,8 +6,10 @@ """ import io +import xml.etree.ElementTree as ET import pytest from src.modules.get_pcmk_properties_scs import HAClusterValidator, main +from src.module_utils.enums import OperatingSystemFamily, TestStatus DUMMY_XML_RSC = """ @@ -115,6 +117,11 @@ "operations": {"monitor": {"timeout": "30"}}, }, "hana": {"meta_attributes": {"clone-max": "2"}}, + "fence_agent": { + "meta_attributes": {"pcmk_delay_max": "15"}, + "operations": {"monitor": {"timeout": ["700", "700s"]}}, + "instance_attributes": {"resourceGroup": "test-rg"}, + }, } }, "OS_PARAMETERS": { @@ -192,7 +199,7 @@ def mock_execute_command(*args, **kwargs): :return: Mocked command 
output. :rtype: str """ - command = args[1] if len(args) > 1 else kwargs.get("command") + command = str(args[1]) if len(args) > 1 else str(kwargs.get("command")) if "sysctl" in command: return DUMMY_OS_COMMAND return mock_xml_outputs.get(command[-1], "") @@ -203,7 +210,7 @@ def mock_execute_command(*args, **kwargs): ) monkeypatch.setattr("builtins.open", fake_open_factory(DUMMY_GLOBAL_INI)) return HAClusterValidator( - os_type="REDHAT", + os_type=OperatingSystemFamily.REDHAT, sid="PRD", scs_instance_number="00", ers_instance_number="01", @@ -241,7 +248,6 @@ def __init__(self, *args, **kwargs): "sid": "PRD", "ascs_instance_number": "00", "ers_instance_number": "01", - "ansible_os_family": "REDHAT", "virtual_machine_name": "vm_name", "fencing_mechanism": "AFA", "pcmk_constants": DUMMY_CONSTANTS, @@ -251,11 +257,198 @@ def exit_json(self, **kwargs): nonlocal mock_result mock_result = kwargs + def mock_ansible_facts(module): + """ + Mock function to return Ansible facts. + + :param module: Ansible module instance. + :type module: AnsibleModule + :return: Mocked Ansible facts. + :rtype: dict + """ + return {"os_family": "REDHAT"} + monkeypatch.setattr( "src.modules.get_pcmk_properties_scs.AnsibleModule", MockAnsibleModule, ) + monkeypatch.setattr( + "src.modules.get_pcmk_properties_scs.ansible_facts", + mock_ansible_facts, + ) main() assert mock_result["status"] == "PASSED" + + def test_get_expected_value_fence_config(self, validator): + """ + Test _get_expected_value method with fence configuration. + """ + validator.fencing_mechanism = "azure-fence-agent" + expected = validator._get_expected_value("crm_config", "priority") + assert expected == "10" + + def test_get_resource_expected_value_meta_attributes(self, validator): + """ + Test _get_resource_expected_value method for meta_attributes section. 
+ """ + expected = validator._get_resource_expected_value( + "fence_agent", "meta_attributes", "pcmk_delay_max" + ) + assert expected == "15" + + def test_create_parameter_with_none_expected_value_resource_category(self, validator): + """ + Test _create_parameter method when expected_value is None and category is + in RESOURCE_CATEGORIES. + """ + param = validator._create_parameter( + category="ipaddr", name="test_param", value="test_value", subcategory="meta_attributes" + ) + assert param["category"] == "ipaddr_meta_attributes" + + def test_create_parameter_with_none_expected_value_or_empty_value(self, validator): + """ + Test _create_parameter method when expected_value is None or value is empty. + + """ + param = validator._create_parameter( + category="crm_config", name="test_param", value="test_value", expected_value=None + ) + assert param["status"] == TestStatus.INFO.value + + param = validator._create_parameter( + category="crm_config", name="test_param", value="", expected_value="expected" + ) + assert param["status"] == TestStatus.INFO.value + + def test_parse_resource_with_meta_and_instance_attributes(self, validator): + """ + Test _parse_resource method with meta_attributes and instance_attributes. + """ + xml_str = """ + + + + + + + """ + element = ET.fromstring(xml_str) + + params = validator._parse_resource(element, "sbd_stonith") + + meta_params = [p for p in params if p["category"] == "sbd_stonith_meta_attributes"] + instance_params = [p for p in params if p["category"] == "sbd_stonith_instance_attributes"] + + assert len(meta_params) >= 1 + assert len(instance_params) >= 1 + + def test_parse_basic_config(self, validator): + """ + Test _parse_basic_config method. 
+ """ + xml_str = """ + + + """ + element = ET.fromstring(xml_str) + + params = validator._parse_basic_config(element, "crm_config", "test_subcategory") + + assert len(params) == 2 + assert params[0]["category"] == "crm_config_test_subcategory" + assert params[0]["name"] == "test_param" + assert params[0]["value"] == "test_value" + + def test_parse_constraints_with_missing_attributes(self, validator): + """ + Test _parse_constraints method with missing attributes. + """ + xml_str = """ + + """ + root = ET.fromstring(xml_str) + params = validator._parse_constraints(root) + assert isinstance(params, list) + + def test_parse_ha_cluster_config_with_empty_root(self, monkeypatch): + """ + Test parse_ha_cluster_config method when root is empty. + Covers lines 508-546. + """ + + def mock_execute_command(*args, **kwargs): + return "" + + monkeypatch.setattr( + "src.module_utils.sap_automation_qa.SapAutomationQA.execute_command_subprocess", + mock_execute_command, + ) + + validator = HAClusterValidator( + os_type=OperatingSystemFamily.SUSE, + sid="PRD", + scs_instance_number="00", + ers_instance_number="01", + fencing_mechanism="AFA", + virtual_machine_name="vmname", + constants=DUMMY_CONSTANTS, + ) + + result = validator.get_result() + assert "details" in result + + def test_get_resource_expected_value_operations_section(self, validator): + """ + Test _get_resource_expected_value method for operations section. + """ + expected = validator._get_resource_expected_value( + "fence_agent", "operations", "timeout", "monitor" + ) + assert expected == ["700", "700s"] + + def test_get_resource_expected_value_return_none(self, validator): + """ + Test _get_resource_expected_value method returns None for unknown section. 
+ """ + expected = validator._get_resource_expected_value("fence_agent", "unknown_section", "param") + assert expected is None + + def test_create_parameter_with_list_expected_value_success(self, validator): + """ + Test _create_parameter method with list expected value - success case. + """ + param = validator._create_parameter( + category="test_category", + name="test_param", + value="value1", + expected_value=["value1", "value2"], + ) + assert param["status"] == TestStatus.SUCCESS.value + assert param["expected_value"] == "value1" + + def test_create_parameter_with_list_expected_value_error(self, validator): + """ + Test _create_parameter method with list expected value - error case. + """ + param = validator._create_parameter( + category="test_category", + name="test_param", + value="value3", + expected_value=["value1", "value2"], + ) + assert param["status"] == TestStatus.ERROR.value + + def test_create_parameter_with_invalid_expected_value_type(self, validator): + """ + Test _create_parameter method with invalid expected value type. 
+ """ + param = validator._create_parameter( + category="test_category", + name="test_param", + value="test_value", + expected_value=123, + ) + assert param["status"] == TestStatus.ERROR.value diff --git a/tests/modules/location_constraints_test.py b/tests/modules/location_constraints_test.py index 32934aae..e63eae86 100644 --- a/tests/modules/location_constraints_test.py +++ b/tests/modules/location_constraints_test.py @@ -8,6 +8,7 @@ import xml.etree.ElementTree as ET import pytest from src.modules.location_constraints import LocationConstraintsManager, main +from src.module_utils.enums import OperatingSystemFamily LC_STR = """ @@ -49,7 +50,7 @@ def location_constraints_manager(self): :return: LocationConstraintsManager instance :rtype: LocationConstraintsManager """ - return LocationConstraintsManager(ansible_os_family="SUSE") + return LocationConstraintsManager(ansible_os_family=OperatingSystemFamily.SUSE) def test_location_constraints_exists_success( self, @@ -131,7 +132,7 @@ class MockAnsibleModule: """ def __init__(self, argument_spec, supports_check_mode): - self.params = {"action": "remove", "ansible_os_family": "SUSE"} + self.params = {"action": "remove"} self.check_mode = False def exit_json(self, **kwargs): @@ -140,9 +141,23 @@ def exit_json(self, **kwargs): """ mock_result.update(kwargs) + def mock_ansible_facts(module): + """ + Mock function to return Ansible facts. + + :param module: Mock Ansible module instance. + :type module: MockAnsibleModule + :return: Dictionary with Ansible facts. 
+ :rtype: dict + """ + return {"os_family": "SUSE"} + with monkeypatch.context() as monkey_patch: monkey_patch.setattr( "src.modules.location_constraints.AnsibleModule", MockAnsibleModule ) + monkey_patch.setattr( + "src.modules.location_constraints.ansible_facts", mock_ansible_facts + ) main() assert mock_result["status"] == "INFO" diff --git a/tests/modules/log_parser_test.py b/tests/modules/log_parser_test.py index 15f984d2..c8c550da 100644 --- a/tests/modules/log_parser_test.py +++ b/tests/modules/log_parser_test.py @@ -8,6 +8,7 @@ import json import pytest from src.modules.log_parser import LogParser, PCMK_KEYWORDS, SYS_KEYWORDS, main +from src.module_utils.enums import OperatingSystemFamily class TestLogParser: @@ -27,7 +28,7 @@ def log_parser_redhat(self): start_time="2025-01-01 00:00:00", end_time="2025-01-01 23:59:59", log_file="test_log_file.log", - ansible_os_family="REDHAT", + ansible_os_family=OperatingSystemFamily.REDHAT, ) @pytest.fixture @@ -42,7 +43,7 @@ def log_parser_suse(self): start_time="2023-01-01 00:00:00", end_time="2023-01-01 23:59:59", log_file="test_log_file.log", - ansible_os_family="SUSE", + ansible_os_family=OperatingSystemFamily.SUSE, ) def test_parse_logs_success(self, mocker, log_parser_redhat): @@ -152,8 +153,20 @@ def __init__(self, argument_spec, supports_check_mode): def exit_json(self, **kwargs): mock_result.update(kwargs) + def mock_ansible_facts(module): + """ + Mock function to return Ansible facts for RedHat. + + :param module: Mock Ansible module instance. + :type module: MockAnsibleModule + :return: Dictionary with Ansible facts. 
+ :rtype: dict + """ + return {"os_family": "RedHat"} + with monkeypatch.context() as monkey_patch: monkey_patch.setattr("src.modules.log_parser.AnsibleModule", MockAnsibleModule) + monkey_patch.setattr("src.modules.log_parser.ansible_facts", mock_ansible_facts) main() assert mock_result["status"] == "FAILED" @@ -180,6 +193,29 @@ def test_merge_logs_success(self, log_parser_redhat): assert len(filtered_logs) == len(log_parser_redhat.logs) assert result["status"] == "PASSED" + def test_merge_logs_success_suse(self, log_parser_suse): + """ + Test the merge_logs method for successful log merging. + + :param log_parser_suse: LogParser instance. + :type log_parser_suse: LogParser + """ + log_parser_suse.logs = [ + '["Jan 01 12:34:56 server1 pacemaker-controld: Notice: ' + 'Resource SAPHana_HDB_00 started"]', + '["Jan 01 12:35:00 server2 pacemaker-controld: Notice: ' + 'Resource SAPHana_HDB_01 started"]', + '["Jan 01 12:36:00 server3 pacemaker-controld: Notice: ' + 'Resource SAPHana_HDB_02 started"]', + ] + + log_parser_suse.merge_logs() + result = log_parser_suse.get_result() + + filtered_logs = [log.strip() for log in json.loads(result["filtered_logs"])] + assert len(filtered_logs) == len(log_parser_suse.logs) + assert result["status"] == "PASSED" + def test_merge_logs_empty_input(self, log_parser_redhat): """ Test the merge_logs method with empty input. @@ -215,3 +251,104 @@ def test_merge_logs_invalid_json(self, log_parser_redhat): filtered_logs = [log.strip() for log in json.loads(result["filtered_logs"])] assert len(filtered_logs) == 2 assert result["status"] == "PASSED" + + def test_merge_logs_suse_timestamp_parsing(self, log_parser_suse): + """ + Test the merge_logs method with SUSE timestamp format. 
+ """ + log_parser_suse.logs = [ + '["2023-01-01T12:34:56.123456789+01:00 server1 pacemaker-controld: Notice: Resource SAPHana_HDB_00 started"]', + '["2023-01-01T12:35:00.987654321+01:00 server2 pacemaker-controld: Notice: Resource SAPHana_HDB_01 started"]', + ] + log_parser_suse.merge_logs() + result = log_parser_suse.get_result() + filtered_logs = json.loads(result["filtered_logs"]) + assert len(filtered_logs) == 2 + assert result["status"] == "PASSED" + + def test_merge_logs_unknown_os_family(self, monkeypatch): + """ + Test the merge_logs method with unknown OS family. + """ + + def mock_execute_command(*args, **kwargs): + return "" + + monkeypatch.setattr( + "src.module_utils.sap_automation_qa.SapAutomationQA.execute_command_subprocess", + mock_execute_command, + ) + log_parser_unknown = LogParser( + start_time="2023-01-01 00:00:00", + end_time="2023-01-01 23:59:59", + log_file="test_log_file.log", + ansible_os_family=OperatingSystemFamily.DEBIAN, + ) + + log_parser_unknown.logs = [ + '["Jan 01 12:34:56 server1 pacemaker-controld: Notice: Resource SAPHana_HDB_00 started"]', + ] + + log_parser_unknown.merge_logs() + result = log_parser_unknown.get_result() + + filtered_logs = json.loads(result["filtered_logs"]) + assert len(filtered_logs) == 1 + assert result["status"] == "PASSED" + + def test_parse_logs_suse_timestamp_format(self, mocker, log_parser_suse): + """ + Test the parse_logs method with SUSE timestamp format. 
+ """ + mocker.patch( + "builtins.open", + mocker.mock_open( + read_data="""2023-01-01T12:34:56.123456789+01:00 nodename SAPHana: SAP HANA action +2023-01-01T12:35:00.987654321+01:00 nodename pacemaker-controld: Pacemaker action""" + ), + ) + + log_parser_suse.parse_logs() + result = log_parser_suse.get_result() + + filtered_logs = json.loads(result["filtered_logs"]) + assert len(filtered_logs) == 2 + assert result["status"] == "PASSED" + + def test_run_module_merge_logs_function(self, monkeypatch): + """ + Test the run_module function with merge_logs function parameter. + """ + mock_result = {} + + class MockAnsibleModule: + """ + Mock AnsibleModule for testing merge_logs function. + """ + + def __init__(self, argument_spec, supports_check_mode): + self.params = { + "start_time": "2023-01-01 00:00:00", + "end_time": "2023-01-01 23:59:59", + "log_file": "test_log_file.log", + "function": "merge_logs", + "logs": ['["Jan 01 12:34:56 server1 test log"]'], + } + self.check_mode = False + + def exit_json(self, **kwargs): + mock_result.update(kwargs) + + def mock_ansible_facts(module): + """ + Mock function to return Ansible facts. 
+ """ + return {"os_family": "RedHat"} + + with monkeypatch.context() as monkey_patch: + monkey_patch.setattr("src.modules.log_parser.AnsibleModule", MockAnsibleModule) + monkey_patch.setattr("src.modules.log_parser.ansible_facts", mock_ansible_facts) + from src.modules.log_parser import run_module + + run_module() + assert mock_result["status"] == "PASSED" diff --git a/tests/roles/ha_db_hana/block_network_test.py b/tests/roles/ha_db_hana/block_network_test.py index 6ec64a81..021854ec 100644 --- a/tests/roles/ha_db_hana/block_network_test.py +++ b/tests/roles/ha_db_hana/block_network_test.py @@ -51,6 +51,7 @@ def test_environment(self, ansible_inventory): "bin/nc", "bin/echo", "bin/sleep", + "bin/SAPHanaSR-manageProvider", ] temp_dir = self.setup_test_environment( @@ -101,8 +102,8 @@ def test_functional_db_primary_node_success(self, test_environment, ansible_inve assert result.rc == 0, ( f"Playbook failed with status: {result.rc}\n" - f"STDOUT: {result.stdout.read() if result.stdout else 'No output'}\n" - f"STDERR: {result.stderr.read() if result.stderr else 'No errors'}\n" + f"STDOUT: {result.stdout if hasattr(result, 'stdout') else 'No output'}\n" + f"STDERR: {result.stderr if hasattr(result, 'stderr') else 'No errors'}\n" f"Events: {[e.get('event') for e in result.events if 'event' in e]}" ) diff --git a/tests/roles/ha_db_hana/ha_config_test.py b/tests/roles/ha_db_hana/ha_config_test.py index 16944dd6..e6be97df 100644 --- a/tests/roles/ha_db_hana/ha_config_test.py +++ b/tests/roles/ha_db_hana/ha_config_test.py @@ -59,6 +59,7 @@ def test_environment(self, ansible_inventory): "project/library/get_package_list", "bin/crm_resource", "bin/crm", + "bin/SAPHanaSR-manageProvider", ], extra_vars_override={"node_tier": "hana"}, ) diff --git a/tests/roles/ha_db_hana/primary_node_ops_test.py b/tests/roles/ha_db_hana/primary_node_ops_test.py index 166a00ba..ec441c7d 100644 --- a/tests/roles/ha_db_hana/primary_node_ops_test.py +++ 
b/tests/roles/ha_db_hana/primary_node_ops_test.py @@ -113,6 +113,7 @@ def test_environment(self, ansible_inventory, task_type): "bin/crm", "bin/echo", "bin/killall", + "bin/SAPHanaSR-manageProvider", ] if task_type["task_name"] == "sbd-fencing": diff --git a/tests/roles/ha_db_hana/resource_migration_test.py b/tests/roles/ha_db_hana/resource_migration_test.py index a5f65e79..a9a34493 100644 --- a/tests/roles/ha_db_hana/resource_migration_test.py +++ b/tests/roles/ha_db_hana/resource_migration_test.py @@ -60,6 +60,10 @@ def test_environment(self, ansible_inventory): "name": "get_hana_resource_id", "SUSE": "cibadmin --query --scope resources", }, + { + "name": "get_hana_resource_id_saphanasr_angi", + "SUSE": "cibadmin --query --scope resources", + }, ] temp_dir = self.setup_test_environment( @@ -75,6 +79,7 @@ def test_environment(self, ansible_inventory): "bin/cibadmin", "bin/crm_resource", "bin/crm", + "bin/SAPHanaSR-manageProvider", ], extra_vars_override={"commands": commands, "node_tier": "hana"}, ) diff --git a/tests/roles/ha_db_hana/secondary_node_ops_test.py b/tests/roles/ha_db_hana/secondary_node_ops_test.py index 82a8924d..2ffe698f 100644 --- a/tests/roles/ha_db_hana/secondary_node_ops_test.py +++ b/tests/roles/ha_db_hana/secondary_node_ops_test.py @@ -87,6 +87,7 @@ def test_environment(self, ansible_inventory, task_type): "bin/crm_resource", "bin/echo", "bin/killall", + "bin/SAPHanaSR-manageProvider", ], extra_vars_override={"node_tier": "hana"}, ) diff --git a/tests/roles/mock_data/SAPHanaSR-manageProvider.txt b/tests/roles/mock_data/SAPHanaSR-manageProvider.txt new file mode 100644 index 00000000..81a3b395 --- /dev/null +++ b/tests/roles/mock_data/SAPHanaSR-manageProvider.txt @@ -0,0 +1,5 @@ +#!/bin/bash + +echo "[ha_dr_provider_saphanasr]" + +exit 0 \ No newline at end of file diff --git a/tests/roles/mock_data/check_indexserver.txt b/tests/roles/mock_data/check_indexserver.txt index 2bc840a7..b939acd1 100644 --- 
a/tests/roles/mock_data/check_indexserver.txt +++ b/tests/roles/mock_data/check_indexserver.txt @@ -7,7 +7,6 @@ def main(): module = AnsibleModule( argument_spec=dict( database_sid=dict(type="str", required=True), - ansible_os_family=dict(type="str", required=True), ) ) diff --git a/tests/roles/mock_data/get_cluster_status_db.txt b/tests/roles/mock_data/get_cluster_status_db.txt index b8478690..e406ad9f 100644 --- a/tests/roles/mock_data/get_cluster_status_db.txt +++ b/tests/roles/mock_data/get_cluster_status_db.txt @@ -9,7 +9,7 @@ def main(): argument_spec=dict( operation_step=dict(type="str", required=True), database_sid=dict(type="str", required=True), - ansible_os_family=dict(type="str", required=False), + saphanasr_provider=dict(type="str", required=True) ) ) diff --git a/tests/roles/mock_data/get_cluster_status_scs.txt b/tests/roles/mock_data/get_cluster_status_scs.txt index a868a4ab..4d8625e5 100644 --- a/tests/roles/mock_data/get_cluster_status_scs.txt +++ b/tests/roles/mock_data/get_cluster_status_scs.txt @@ -9,7 +9,6 @@ def main(): module = AnsibleModule( argument_spec=dict( sap_sid=dict(type='str', required=True), - ansible_os_family=dict(type='str', required=True), ) ) diff --git a/tests/roles/mock_data/get_pcmk_properties_db.txt b/tests/roles/mock_data/get_pcmk_properties_db.txt index cefe2503..856c8ad3 100644 --- a/tests/roles/mock_data/get_pcmk_properties_db.txt +++ b/tests/roles/mock_data/get_pcmk_properties_db.txt @@ -6,11 +6,11 @@ def main(): argument_spec=dict( sid=dict(type="str"), instance_number=dict(type="str"), - ansible_os_family=dict(type="str"), virtual_machine_name=dict(type="str"), fencing_mechanism=dict(type="str"), os_version=dict(type="str"), pcmk_constants=dict(type="dict"), + saphanasr_provider=dict(type="str", required=True) ) ) diff --git a/tests/roles/mock_data/get_pcmk_properties_scs.txt b/tests/roles/mock_data/get_pcmk_properties_scs.txt index 57927b71..a72ae8f9 100644 --- a/tests/roles/mock_data/get_pcmk_properties_scs.txt 
+++ b/tests/roles/mock_data/get_pcmk_properties_scs.txt @@ -7,7 +7,6 @@ def main(): sid=dict(type='str', required=True), ascs_instance_number=dict(type='str', required=True), ers_instance_number=dict(type='str', required=True), - ansible_os_family=dict(type='str', required=True), virtual_machine_name=dict(type='str', required=True), pcmk_constants=dict(type='dict', required=True), fencing_mechanism=dict(type='str', required=True), diff --git a/tests/roles/mock_data/location_constraints.txt b/tests/roles/mock_data/location_constraints.txt index cc44e972..49818ac7 100644 --- a/tests/roles/mock_data/location_constraints.txt +++ b/tests/roles/mock_data/location_constraints.txt @@ -7,13 +7,11 @@ def main(): module = AnsibleModule( argument_spec=dict( action=dict(type="str", required=True), - ansible_os_family=dict(type="str", required=True), ), supports_check_mode=True ) action = module.params["action"] - ansible_os_family = module.params["ansible_os_family"] diff --git a/tests/roles/mock_data/log_parser.txt b/tests/roles/mock_data/log_parser.txt index 762bd3bb..4990f81f 100644 --- a/tests/roles/mock_data/log_parser.txt +++ b/tests/roles/mock_data/log_parser.txt @@ -11,7 +11,6 @@ def main(): end_time=dict(type='str', required=False), log_file=dict(type='str', required=False, default='/var/log/messages'), keywords=dict(type='list', required=False, default=[]), - ansible_os_family=dict(type='str', required=True), function=dict(type='str', required=True), logs=dict(type='list', required=False) ) diff --git a/tests/roles/mock_data/secondary_get_cluster_status_db.txt b/tests/roles/mock_data/secondary_get_cluster_status_db.txt index 5eed9c9d..fb4378cd 100644 --- a/tests/roles/mock_data/secondary_get_cluster_status_db.txt +++ b/tests/roles/mock_data/secondary_get_cluster_status_db.txt @@ -9,7 +9,7 @@ def main(): argument_spec=dict( operation_step=dict(type="str", required=True), database_sid=dict(type="str", required=True), - ansible_os_family=dict(type="str", required=False), 
+ saphanasr_provider=dict(type="str", required=True) ) ) diff --git a/tests/roles/roles_testing_base.py b/tests/roles/roles_testing_base.py index d9f43c8d..11534f5c 100644 --- a/tests/roles/roles_testing_base.py +++ b/tests/roles/roles_testing_base.py @@ -162,6 +162,7 @@ def setup_test_environment( "misc/tasks/var-log-messages.yml", "misc/tasks/post-telemetry-data.yml", "misc/tasks/loadbalancer.yml", + "misc/tasks/get-saphanasr-provider.yml", ] task_file = f"{role_type}/tasks/{task_name}.yml" From 61a986ac4b9cc506bc033326eff06c585f448c23 Mon Sep 17 00:00:00 2001 From: devanshjain Date: Tue, 24 Jun 2025 22:26:09 +0000 Subject: [PATCH 2/6] Refactor log_parser.py for improved readability and maintainability --- src/modules/log_parser.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/modules/log_parser.py b/src/modules/log_parser.py index ade2a123..d9a3b96a 100644 --- a/src/modules/log_parser.py +++ b/src/modules/log_parser.py @@ -14,8 +14,8 @@ from ansible.module_utils.sap_automation_qa import SapAutomationQA, TestStatus from ansible.module_utils.enums import OperatingSystemFamily except ImportError: - from src.module_utils.sap_automation_qa import SapAutomationQA, TestStatus - from src.module_utils.enums import OperatingSystemFamily + from src.module_utils.sap_automation_qa import SapAutomationQA + from src.module_utils.enums import OperatingSystemFamily, TestStatus DOCUMENTATION = r""" --- From 1c487e8c1f92a4b5ffb3a34620a763a2cf208a46 Mon Sep 17 00:00:00 2001 From: devanshjain Date: Wed, 25 Jun 2025 18:31:28 +0000 Subject: [PATCH 3/6] Fix: Ensure command failure is handled correctly in get-saphanasr-provider.yml --- src/roles/misc/tasks/get-saphanasr-provider.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/src/roles/misc/tasks/get-saphanasr-provider.yml b/src/roles/misc/tasks/get-saphanasr-provider.yml index 135230ea..c8a4f3cd 100644 --- a/src/roles/misc/tasks/get-saphanasr-provider.yml +++ 
b/src/roles/misc/tasks/get-saphanasr-provider.yml @@ -14,6 +14,7 @@ ansible.builtin.command: "SAPHanaSR-manageProvider --sid {{ db_sid | upper }} --show --provider=sushanasr" register: hanasr_command_output changed_when: false + failed_when: false args: chdir: "/usr/sbin" From d9504306cb5a9e0724b1d385b573ecd1d9b5ace7 Mon Sep 17 00:00:00 2001 From: devanshjain Date: Thu, 26 Jun 2025 20:46:05 +0000 Subject: [PATCH 4/6] Enhance HANA DB cluster status checks with db_instance_number parameter and update .gitignore for Ansible files --- .gitignore | 1 + scripts/sap_automation_qa.sh | 1 + scripts/setup.sh | 5 +++-- src/module_utils/enums.py | 1 + src/modules/check_indexserver.py | 2 +- src/modules/get_cluster_status_db.py | 14 ++++++++++++-- src/modules/get_cluster_status_scs.py | 2 +- src/modules/get_pcmk_properties_db.py | 2 +- src/modules/get_pcmk_properties_scs.py | 2 +- src/modules/location_constraints.py | 2 +- src/modules/log_parser.py | 2 +- src/roles/ha_db_hana/tasks/block-network.yml | 4 ++++ src/roles/ha_db_hana/tasks/fs-freeze.yml | 2 ++ src/roles/ha_db_hana/tasks/primary-crash-index.yml | 3 +++ src/roles/ha_db_hana/tasks/primary-echo-b.yml | 3 +++ src/roles/ha_db_hana/tasks/primary-node-crash.yml | 2 ++ src/roles/ha_db_hana/tasks/primary-node-kill.yml | 3 +++ src/roles/ha_db_hana/tasks/resource-migration.yml | 2 ++ src/roles/ha_db_hana/tasks/sbd-fencing.yml | 2 ++ .../ha_db_hana/tasks/secondary-crash-index.yml | 2 ++ src/roles/ha_db_hana/tasks/secondary-echo-b.yml | 2 ++ src/roles/ha_db_hana/tasks/secondary-node-kill.yml | 2 ++ src/roles/misc/tasks/cluster-report.yml | 1 + src/roles/misc/tasks/pre-validations-db.yml | 1 + 24 files changed, 53 insertions(+), 10 deletions(-) diff --git a/.gitignore b/.gitignore index 34fe382f..82d7c8f1 100644 --- a/.gitignore +++ b/.gitignore @@ -408,3 +408,4 @@ WORKSPACES/* VMWPASSWORD .coverage.* *.ini +.ansible/ diff --git a/scripts/sap_automation_qa.sh b/scripts/sap_automation_qa.sh index a2b29da3..e8fb538d 100755 --- 
a/scripts/sap_automation_qa.sh +++ b/scripts/sap_automation_qa.sh @@ -3,6 +3,7 @@ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. +set -euo pipefail # Activate the virtual environment source "$(realpath $(dirname $(realpath $0))/..)/.venv/bin/activate" diff --git a/scripts/setup.sh b/scripts/setup.sh index d9a26118..a47a65d0 100755 --- a/scripts/setup.sh +++ b/scripts/setup.sh @@ -1,8 +1,9 @@ +#!/bin/bash + # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. -#!/bin/bash - +set -euo pipefail # Function to check if a command exists command_exists() { command -v "$1" &> /dev/null diff --git a/src/module_utils/enums.py b/src/module_utils/enums.py index 5b8f476f..37da3ccd 100644 --- a/src/module_utils/enums.py +++ b/src/module_utils/enums.py @@ -36,6 +36,7 @@ class OperatingSystemFamily(Enum): SUSE = "SUSE" DEBIAN = "DEBIAN" WINDOWS = "WINDOWS" + UNKNOWN = "UNKNOWN" class HanaSRProvider(Enum): diff --git a/src/modules/check_indexserver.py b/src/modules/check_indexserver.py index bdfe3ee0..a3106fbf 100644 --- a/src/modules/check_indexserver.py +++ b/src/modules/check_indexserver.py @@ -223,7 +223,7 @@ def main(): index_server_check = IndexServerCheck( database_sid=database_sid, os_distribution=OperatingSystemFamily( - str(ansible_facts(module).get("os_family", "SUSE")).upper() + str(ansible_facts(module).get("os_family", "UNKNOWN")).upper() ), ) index_server_check.check_indexserver() diff --git a/src/modules/get_cluster_status_db.py b/src/modules/get_cluster_status_db.py index 949f6e58..2928890c 100644 --- a/src/modules/get_cluster_status_db.py +++ b/src/modules/get_cluster_status_db.py @@ -46,6 +46,11 @@ - The SAP HANA system replication provider type type: str required: true + db_instance_number: + description: + - The instance number of the SAP HANA database + type: str + required: true author: - Microsoft Corporation notes: @@ -138,12 +143,14 @@ class HanaClusterStatusChecker(BaseClusterStatusChecker): def __init__( 
self, database_sid: str, + db_instance_number: str, saphanasr_provider: HanaSRProvider, ansible_os_family: OperatingSystemFamily, ): super().__init__(ansible_os_family) self.database_sid = database_sid self.saphanasr_provider = saphanasr_provider + self.db_instance_number = db_instance_number self.result.update( { "primary_node": "", @@ -199,7 +206,8 @@ def _process_node_attributes(self, cluster_status_xml: ET.Element) -> Dict[str, }, HanaSRProvider.ANGI: { "clone_attr": f"hana_{self.database_sid}_clone_state", - "sync_attr": f"master-rsc_SAPHanaCon_{self.database_sid.upper()}_HDB00", + "sync_attr": f"master-rsc_SAPHanaCon_{self.database_sid.upper()}" + + f"_HDB{self.db_instance_number}", "primary": {"clone": "PROMOTED", "sync": "150"}, "secondary": {"clone": "DEMOTED", "sync": "100"}, }, @@ -279,6 +287,7 @@ def run_module() -> None: operation_step=dict(type="str", required=True), database_sid=dict(type="str", required=True), saphanasr_provider=dict(type="str", required=True), + db_instance_number=dict(type="str", required=True), filter=dict(type="str", required=False, default="os_family"), ) @@ -288,8 +297,9 @@ def run_module() -> None: database_sid=module.params["database_sid"], saphanasr_provider=HanaSRProvider(module.params["saphanasr_provider"]), ansible_os_family=OperatingSystemFamily( - str(ansible_facts(module).get("os_family", "SUSE")).upper() + str(ansible_facts(module).get("os_family", "UNKNOWN")).upper() ), + db_instance_number=module.params["db_instance_number"], ) checker.run() diff --git a/src/modules/get_cluster_status_scs.py b/src/modules/get_cluster_status_scs.py index 03f2be33..3bf5a966 100644 --- a/src/modules/get_cluster_status_scs.py +++ b/src/modules/get_cluster_status_scs.py @@ -285,7 +285,7 @@ def run_module() -> None: ) module = AnsibleModule(argument_spec=module_args, supports_check_mode=True) - ansible_os_family = str(ansible_facts(module).get("os_family", "SUSE")).upper() + ansible_os_family = str(ansible_facts(module).get("os_family", 
"UNKNOWN")).upper() checker = SCSClusterStatusChecker( sap_sid=module.params["sap_sid"], diff --git a/src/modules/get_pcmk_properties_db.py b/src/modules/get_pcmk_properties_db.py index 0ffdff64..c9b06007 100644 --- a/src/modules/get_pcmk_properties_db.py +++ b/src/modules/get_pcmk_properties_db.py @@ -650,7 +650,7 @@ def main() -> None: ) validator = HAClusterValidator( - os_type=OperatingSystemFamily(str(ansible_facts(module).get("os_family", "SUSE")).upper()), + os_type=OperatingSystemFamily(str(ansible_facts(module).get("os_family", "UNKNOWN")).upper()), os_version=module.params["os_version"], instance_number=module.params["instance_number"], sid=module.params["sid"], diff --git a/src/modules/get_pcmk_properties_scs.py b/src/modules/get_pcmk_properties_scs.py index 2a031479..70c5c77d 100644 --- a/src/modules/get_pcmk_properties_scs.py +++ b/src/modules/get_pcmk_properties_scs.py @@ -587,7 +587,7 @@ def main() -> None: sid=module.params["sid"], scs_instance_number=module.params["ascs_instance_number"], ers_instance_number=module.params["ers_instance_number"], - os_type=OperatingSystemFamily(str(ansible_facts(module).get("os_family", "SUSE")).upper()), + os_type=OperatingSystemFamily(str(ansible_facts(module).get("os_family", "UNKNOWN")).upper()), virtual_machine_name=module.params["virtual_machine_name"], constants=module.params["pcmk_constants"], fencing_mechanism=module.params["fencing_mechanism"], diff --git a/src/modules/location_constraints.py b/src/modules/location_constraints.py index b8913acb..71e510b2 100644 --- a/src/modules/location_constraints.py +++ b/src/modules/location_constraints.py @@ -154,7 +154,7 @@ def run_module() -> None: manager = LocationConstraintsManager( ansible_os_family=OperatingSystemFamily( - str(ansible_facts(module).get("os_family", "SUSE")).upper() + str(ansible_facts(module).get("os_family", "UNKNOWN")).upper() ) ) diff --git a/src/modules/log_parser.py b/src/modules/log_parser.py index d9a3b96a..c55d61d5 100644 --- 
a/src/modules/log_parser.py +++ b/src/modules/log_parser.py @@ -329,7 +329,7 @@ def run_module() -> None: end_time=module.params.get("end_time"), log_file=module.params.get("log_file"), ansible_os_family=OperatingSystemFamily( - str(ansible_facts(module).get("os_family", "SUSE")).upper() + str(ansible_facts(module).get("os_family", "UNKNOWN")).upper() ), logs=module.params.get("logs"), ) diff --git a/src/roles/ha_db_hana/tasks/block-network.yml b/src/roles/ha_db_hana/tasks/block-network.yml index 7e4728bb..98b39c02 100644 --- a/src/roles/ha_db_hana/tasks/block-network.yml +++ b/src/roles/ha_db_hana/tasks/block-network.yml @@ -89,6 +89,7 @@ block: - name: "Test Execution: Validate HANA DB cluster status (Primary Node)" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" @@ -114,6 +115,7 @@ - name: "Test Execution: Validate HANA DB cluster status 2" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" @@ -132,6 +134,7 @@ block: - name: "Test Execution: Validate HANA DB cluster status (Secondary Node)" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" @@ -148,6 +151,7 @@ - name: "Test Execution: Validate HANA DB cluster status 2" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" diff --git a/src/roles/ha_db_hana/tasks/fs-freeze.yml b/src/roles/ha_db_hana/tasks/fs-freeze.yml index aa00388c..5efa5cd0 100644 --- 
a/src/roles/ha_db_hana/tasks/fs-freeze.yml +++ b/src/roles/ha_db_hana/tasks/fs-freeze.yml @@ -55,6 +55,7 @@ - name: "Test Execution: Validate HANA DB cluster status 1" when: cluster_status_pre.AUTOMATED_REGISTER | lower == "true" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" @@ -70,6 +71,7 @@ block: - name: "Test Execution: Validate HANA DB cluster status 2" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "post_failover" database_sid: "{{ db_sid | lower }}" saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" diff --git a/src/roles/ha_db_hana/tasks/primary-crash-index.yml b/src/roles/ha_db_hana/tasks/primary-crash-index.yml index 841c733b..a3543ae5 100644 --- a/src/roles/ha_db_hana/tasks/primary-crash-index.yml +++ b/src/roles/ha_db_hana/tasks/primary-crash-index.yml @@ -53,6 +53,7 @@ - name: "Test Execution: Validate HANA DB cluster status 1" when: cluster_status_pre.AUTOMATED_REGISTER == "true" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" @@ -66,6 +67,7 @@ - name: "Test Execution: Validate HANA DB cluster status" when: cluster_status_pre.AUTOMATED_REGISTER == "false" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" @@ -114,6 +116,7 @@ - name: "Test Execution: Validate HANA DB cluster status 2" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "post_failover" database_sid: "{{ db_sid | lower }}" saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" diff --git 
a/src/roles/ha_db_hana/tasks/primary-echo-b.yml b/src/roles/ha_db_hana/tasks/primary-echo-b.yml index b54471e8..173acecb 100644 --- a/src/roles/ha_db_hana/tasks/primary-echo-b.yml +++ b/src/roles/ha_db_hana/tasks/primary-echo-b.yml @@ -45,6 +45,7 @@ - name: "Test Execution: Validate HANA DB cluster status 1" when: cluster_status_pre.AUTOMATED_REGISTER == "true" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" @@ -58,6 +59,7 @@ - name: "Test Execution: Validate HANA DB cluster status" when: cluster_status_pre.AUTOMATED_REGISTER == "false" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" @@ -102,6 +104,7 @@ - name: "Test Execution: Validate HANA DB cluster status 2" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "post_failover" database_sid: "{{ db_sid | lower }}" saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" diff --git a/src/roles/ha_db_hana/tasks/primary-node-crash.yml b/src/roles/ha_db_hana/tasks/primary-node-crash.yml index e4eb5224..00089109 100644 --- a/src/roles/ha_db_hana/tasks/primary-node-crash.yml +++ b/src/roles/ha_db_hana/tasks/primary-node-crash.yml @@ -41,6 +41,7 @@ - name: "Test Execution: Validate HANA DB cluster status 1" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" @@ -83,6 +84,7 @@ - name: "Test execution: Validate HANA DB cluster status 2" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "post_failover" database_sid: "{{ db_sid | lower }}" saphanasr_provider: 
"{{ saphanasr_provider | default('SAPHanaSR') }}" diff --git a/src/roles/ha_db_hana/tasks/primary-node-kill.yml b/src/roles/ha_db_hana/tasks/primary-node-kill.yml index 19d8fa26..5eca4111 100644 --- a/src/roles/ha_db_hana/tasks/primary-node-kill.yml +++ b/src/roles/ha_db_hana/tasks/primary-node-kill.yml @@ -42,6 +42,7 @@ - name: "Test Execution: Validate HANA DB cluster status 1" when: cluster_status_pre.AUTOMATED_REGISTER == "true" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" @@ -57,6 +58,7 @@ block: - name: "Test Execution: Validate HANA DB cluster status" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" @@ -100,6 +102,7 @@ - name: "Test execution: Validate HANA DB cluster status 2" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "post_failover" database_sid: "{{ db_sid | lower }}" saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" diff --git a/src/roles/ha_db_hana/tasks/resource-migration.yml b/src/roles/ha_db_hana/tasks/resource-migration.yml index 5afc7fdd..0c609767 100644 --- a/src/roles/ha_db_hana/tasks/resource-migration.yml +++ b/src/roles/ha_db_hana/tasks/resource-migration.yml @@ -100,6 +100,7 @@ - name: "Test Execution: Validate HANA DB cluster status 1" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" @@ -154,6 +155,7 @@ - name: "Test Execution: Validate HANA DB cluster status 2" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "test_execution" database_sid: "{{ db_sid | 
lower }}" saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" diff --git a/src/roles/ha_db_hana/tasks/sbd-fencing.yml b/src/roles/ha_db_hana/tasks/sbd-fencing.yml index 9772fe8c..aa794ec4 100644 --- a/src/roles/ha_db_hana/tasks/sbd-fencing.yml +++ b/src/roles/ha_db_hana/tasks/sbd-fencing.yml @@ -56,6 +56,7 @@ - name: "Test Execution: Validate HANA DB cluster status during stop operation." when: cluster_status_pre.AUTOMATED_REGISTER == "true" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" @@ -71,6 +72,7 @@ block: - name: "Test Execution: Validate HANA DB cluster status 2" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" diff --git a/src/roles/ha_db_hana/tasks/secondary-crash-index.yml b/src/roles/ha_db_hana/tasks/secondary-crash-index.yml index 4b602de9..986c7d96 100644 --- a/src/roles/ha_db_hana/tasks/secondary-crash-index.yml +++ b/src/roles/ha_db_hana/tasks/secondary-crash-index.yml @@ -53,6 +53,7 @@ block: - name: "Test Execution: Validate HANA DB cluster status" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" @@ -65,6 +66,7 @@ - name: "Test Execution: Validate HANA DB cluster status 2" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "post_failover" database_sid: "{{ db_sid | lower }}" saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" diff --git a/src/roles/ha_db_hana/tasks/secondary-echo-b.yml b/src/roles/ha_db_hana/tasks/secondary-echo-b.yml index d566b718..1836717d 100644 --- 
a/src/roles/ha_db_hana/tasks/secondary-echo-b.yml +++ b/src/roles/ha_db_hana/tasks/secondary-echo-b.yml @@ -49,6 +49,7 @@ block: - name: "Test Execution: Validate HANA DB cluster status" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" @@ -61,6 +62,7 @@ - name: "Test Execution: Validate HANA DB cluster status 2" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "post_failover" database_sid: "{{ db_sid | lower }}" saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" diff --git a/src/roles/ha_db_hana/tasks/secondary-node-kill.yml b/src/roles/ha_db_hana/tasks/secondary-node-kill.yml index d5c4c4fd..19d65184 100644 --- a/src/roles/ha_db_hana/tasks/secondary-node-kill.yml +++ b/src/roles/ha_db_hana/tasks/secondary-node-kill.yml @@ -47,6 +47,7 @@ block: - name: "Test Execution: Validate HANA DB cluster status" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" @@ -59,6 +60,7 @@ - name: "Test execution: Validate HANA DB cluster status 2" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" diff --git a/src/roles/misc/tasks/cluster-report.yml b/src/roles/misc/tasks/cluster-report.yml index 61fec66a..f37a8440 100644 --- a/src/roles/misc/tasks/cluster-report.yml +++ b/src/roles/misc/tasks/cluster-report.yml @@ -8,6 +8,7 @@ - name: "Get the cluster status" become: true get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "cluster_report_collection" database_sid: "{{ db_sid | lower | default('') }}" 
saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" diff --git a/src/roles/misc/tasks/pre-validations-db.yml b/src/roles/misc/tasks/pre-validations-db.yml index 86f47929..614a58ef 100644 --- a/src/roles/misc/tasks/pre-validations-db.yml +++ b/src/roles/misc/tasks/pre-validations-db.yml @@ -21,6 +21,7 @@ - name: "Pre Validation: Validate HANA DB cluster status on primary node" become: true get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "pre_failover" database_sid: "{{ db_sid | lower }}" saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" From 377141a01f170c2995df8316d9058a22e8286e1a Mon Sep 17 00:00:00 2001 From: devanshjain Date: Thu, 26 Jun 2025 20:51:32 +0000 Subject: [PATCH 5/6] Add db_instance_number parameter to get_cluster_status_db module and update mock data --- tests/modules/get_cluster_status_db_test.py | 3 +++ tests/roles/mock_data/get_cluster_status_db.txt | 3 ++- tests/roles/mock_data/secondary_get_cluster_status_db.txt | 3 ++- 3 files changed, 7 insertions(+), 2 deletions(-) diff --git a/tests/modules/get_cluster_status_db_test.py b/tests/modules/get_cluster_status_db_test.py index b689a109..86340430 100644 --- a/tests/modules/get_cluster_status_db_test.py +++ b/tests/modules/get_cluster_status_db_test.py @@ -31,6 +31,7 @@ def hana_checker_classic(self): database_sid="TEST", ansible_os_family=OperatingSystemFamily.REDHAT, saphanasr_provider=HanaSRProvider.SAPHANASR, + db_instance_number="00", ) @pytest.fixture @@ -45,6 +46,7 @@ def hana_checker_angi(self): database_sid="TEST", ansible_os_family=OperatingSystemFamily.SUSE, saphanasr_provider=HanaSRProvider.ANGI, + db_instance_number="00", ) def test_get_automation_register(self, mocker, hana_checker_classic): @@ -279,6 +281,7 @@ def test_run_module(self, mocker): "database_sid": "TEST", "operation_step": "check", "saphanasr_provider": "SAPHanaSR", + "db_instance_number": "00", } mocker.patch( 
"src.modules.get_cluster_status_db.ansible_facts", return_value={"os_family": "REDHAT"} diff --git a/tests/roles/mock_data/get_cluster_status_db.txt b/tests/roles/mock_data/get_cluster_status_db.txt index e406ad9f..2658cf76 100644 --- a/tests/roles/mock_data/get_cluster_status_db.txt +++ b/tests/roles/mock_data/get_cluster_status_db.txt @@ -9,7 +9,8 @@ def main(): argument_spec=dict( operation_step=dict(type="str", required=True), database_sid=dict(type="str", required=True), - saphanasr_provider=dict(type="str", required=True) + saphanasr_provider=dict(type="str", required=True), + db_instance_number=dict(type="str", required=True), ) ) diff --git a/tests/roles/mock_data/secondary_get_cluster_status_db.txt b/tests/roles/mock_data/secondary_get_cluster_status_db.txt index fb4378cd..afbec0a7 100644 --- a/tests/roles/mock_data/secondary_get_cluster_status_db.txt +++ b/tests/roles/mock_data/secondary_get_cluster_status_db.txt @@ -9,7 +9,8 @@ def main(): argument_spec=dict( operation_step=dict(type="str", required=True), database_sid=dict(type="str", required=True), - saphanasr_provider=dict(type="str", required=True) + saphanasr_provider=dict(type="str", required=True), + db_instance_number=dict(type="str", required=True) ) ) From f5f53509e3f0f2fc7fd5371b153a38ab16964f90 Mon Sep 17 00:00:00 2001 From: devanshjain Date: Thu, 26 Jun 2025 21:09:31 +0000 Subject: [PATCH 6/6] Refactor: Improve readability of os_type assignment in get_pcmk_properties_db and get_pcmk_properties_scs modules --- src/modules/get_pcmk_properties_db.py | 4 +++- src/modules/get_pcmk_properties_scs.py | 4 +++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/src/modules/get_pcmk_properties_db.py b/src/modules/get_pcmk_properties_db.py index c9b06007..f044b302 100644 --- a/src/modules/get_pcmk_properties_db.py +++ b/src/modules/get_pcmk_properties_db.py @@ -650,7 +650,9 @@ def main() -> None: ) validator = HAClusterValidator( - 
os_type=OperatingSystemFamily(str(ansible_facts(module).get("os_family", "UNKNOWN")).upper()), + os_type=OperatingSystemFamily( + str(ansible_facts(module).get("os_family", "UNKNOWN")).upper() + ), os_version=module.params["os_version"], instance_number=module.params["instance_number"], sid=module.params["sid"], diff --git a/src/modules/get_pcmk_properties_scs.py b/src/modules/get_pcmk_properties_scs.py index 70c5c77d..b0cd5063 100644 --- a/src/modules/get_pcmk_properties_scs.py +++ b/src/modules/get_pcmk_properties_scs.py @@ -587,7 +587,9 @@ def main() -> None: sid=module.params["sid"], scs_instance_number=module.params["ascs_instance_number"], ers_instance_number=module.params["ers_instance_number"], - os_type=OperatingSystemFamily(str(ansible_facts(module).get("os_family", "UNKNOWN")).upper()), + os_type=OperatingSystemFamily( + str(ansible_facts(module).get("os_family", "UNKNOWN")).upper() + ), virtual_machine_name=module.params["virtual_machine_name"], constants=module.params["pcmk_constants"], fencing_mechanism=module.params["fencing_mechanism"],