From baec97e89dc5f47760ef7a43cb60b289ea0fae49 Mon Sep 17 00:00:00 2001 From: Devansh Jain <86314060+devanshjainms@users.noreply.github.com> Date: Fri, 27 Jun 2025 09:03:27 -0700 Subject: [PATCH 1/6] Extend support for SAPHanaSR-angi provider (#90) --- .gitignore | 1 + docs/HIGH_AVAILABILITY.md | 46 +- requirements.txt | 55 +-- scripts/sap_automation_qa.sh | 160 ++++++- scripts/setup.sh | 11 +- src/module_utils/commands.py | 13 +- src/module_utils/enums.py | 139 ++++++ src/module_utils/filter_tests.py | 134 ++++++ src/module_utils/get_cluster_status.py | 102 +++-- src/module_utils/sap_automation_qa.py | 67 +-- src/modules/check_indexserver.py | 35 +- src/modules/filesystem_freeze.py | 2 +- src/modules/get_azure_lb.py | 29 +- src/modules/get_cluster_status_db.py | 110 +++-- src/modules/get_cluster_status_scs.py | 17 +- src/modules/get_pcmk_properties_db.py | 82 ++-- src/modules/get_pcmk_properties_scs.py | 43 +- src/modules/location_constraints.py | 28 +- src/modules/log_parser.py | 32 +- src/modules/send_telemetry_data.py | 18 +- src/roles/ha_db_hana/tasks/block-network.yml | 12 +- .../ha_db_hana/tasks/files/constants.yaml | 82 +++- src/roles/ha_db_hana/tasks/fs-freeze.yml | 6 +- src/roles/ha_db_hana/tasks/ha-config.yml | 6 +- .../ha_db_hana/tasks/primary-crash-index.yml | 10 +- src/roles/ha_db_hana/tasks/primary-echo-b.yml | 9 +- .../ha_db_hana/tasks/primary-node-crash.yml | 6 +- .../ha_db_hana/tasks/primary-node-kill.yml | 9 +- .../ha_db_hana/tasks/resource-migration.yml | 28 +- src/roles/ha_db_hana/tasks/sbd-fencing.yml | 6 +- .../tasks/secondary-crash-index.yml | 7 +- .../ha_db_hana/tasks/secondary-echo-b.yml | 6 +- .../ha_db_hana/tasks/secondary-node-kill.yml | 8 +- src/roles/ha_scs/tasks/ascs-migration.yml | 1 - src/roles/ha_scs/tasks/ascs-node-crash.yml | 3 - src/roles/ha_scs/tasks/block-network.yml | 2 - src/roles/ha_scs/tasks/ha-config.yml | 1 - .../ha_scs/tasks/ha-failover-to-node.yml | 1 - .../ha_scs/tasks/kill-enqueue-replication.yml | 2 - .../ha_scs/tasks/kill-enqueue-server.yml | 3 - .../ha_scs/tasks/kill-message-server.yml | 3 - .../ha_scs/tasks/kill-sapstartsrv-process.yml | 1 - src/roles/ha_scs/tasks/manual-restart.yml | 2 - src/roles/misc/tasks/cluster-report.yml | 4 +- .../misc/tasks/get-saphanasr-provider.yml | 29 ++ src/roles/misc/tasks/post-validations.yml | 1 - src/roles/misc/tasks/pre-validations-db.yml | 12 +- src/roles/misc/tasks/pre-validations-scs.yml | 1 - src/roles/misc/tasks/rescue.yml | 1 - src/roles/misc/tasks/var-log-messages.yml | 1 - src/vars/input-api.yaml | 3 + tests/module_utils/filter_tests_test.py | 424 ++++++++++++++++++ tests/module_utils/get_cluster_status_test.py | 179 +++++++- tests/module_utils/sap_automation_qa_test.py | 4 +- tests/modules/check_indexserver_test.py | 46 +- tests/modules/get_cluster_status_db_test.py | 188 ++++++-- tests/modules/get_cluster_status_scs_test.py | 3 + tests/modules/get_pcmk_properties_db_test.py | 267 ++++++++++- tests/modules/get_pcmk_properties_scs_test.py | 199 +++++++- tests/modules/location_constraints_test.py | 19 +- tests/modules/log_parser_test.py | 141 +++++- tests/roles/ha_db_hana/block_network_test.py | 5 +- tests/roles/ha_db_hana/ha_config_test.py | 1 + .../roles/ha_db_hana/primary_node_ops_test.py | 1 + .../ha_db_hana/resource_migration_test.py | 5 + .../ha_db_hana/secondary_node_ops_test.py | 1 + .../mock_data/SAPHanaSR-manageProvider.txt | 5 + tests/roles/mock_data/check_indexserver.txt | 1 - .../roles/mock_data/get_cluster_status_db.txt | 3 +- .../mock_data/get_cluster_status_scs.txt | 1 - 
.../mock_data/get_pcmk_properties_db.txt | 2 +- .../mock_data/get_pcmk_properties_scs.txt | 1 - .../roles/mock_data/location_constraints.txt | 2 - tests/roles/mock_data/log_parser.txt | 1 - .../secondary_get_cluster_status_db.txt | 3 +- tests/roles/roles_testing_base.py | 1 + 76 files changed, 2410 insertions(+), 483 deletions(-) create mode 100644 src/module_utils/enums.py create mode 100644 src/module_utils/filter_tests.py create mode 100644 src/roles/misc/tasks/get-saphanasr-provider.yml create mode 100644 tests/module_utils/filter_tests_test.py create mode 100644 tests/roles/mock_data/SAPHanaSR-manageProvider.txt diff --git a/.gitignore b/.gitignore index 34fe382f..82d7c8f1 100644 --- a/.gitignore +++ b/.gitignore @@ -408,3 +408,4 @@ WORKSPACES/* VMWPASSWORD .coverage.* *.ini +.ansible/ diff --git a/docs/HIGH_AVAILABILITY.md b/docs/HIGH_AVAILABILITY.md index 071036c4..d0f25736 100644 --- a/docs/HIGH_AVAILABILITY.md +++ b/docs/HIGH_AVAILABILITY.md @@ -21,8 +21,10 @@ Currently SAP Testing Automation Framework is supported for below Linux distros |-----------|------|--------------|---------| | SAP Central Services | ENSA1 or ENSA2 | Azure Fencing Agent | Azure Files or ANF | | SAP Central Services | ENSA1 or ENSA2 | ISCSI (SBD device) | Azure Files or ANF | +| SAP Central Services | ENSA1 or ENSA2 | Azure Shared Disks (SBD device) | Azure Files or ANF | | SAP HANA | Scale-up | Azure Fencing Agent | Azure Managed Disk or ANF | | SAP HANA | Scale-up | ISCSI (SBD device) | Azure Managed Disk or ANF | +| SAP HANA | Scale-up | Azure Shared Disks (SBD device) | Azure Managed Disk or ANF | For SAP Central Services on SLES, both the simple mount approach and the classic method are supported. @@ -238,9 +240,10 @@ db_high_availability: true # The high availability configuration of the SCS and DB instance. Supported values are: # - AFA (for Azure Fencing Agent) -# - ISCSI (for SBD devices) -scs_cluster_type: "AFA" # or "ISCSI" -database_cluster_type: "AFA" # or "ISCSI" +# - ISCSI (for SBD devices with ISCSI target servers) +# - ASD (for SBD devices with Azure Shared Disks) +scs_cluster_type: "AFA" # or "ISCSI" or "ASD" +database_cluster_type: "AFA" # or "ISCSI" or "ASD" # The instance number of the SCS, ERS and DB instance. scs_instance_number: "00" @@ -273,23 +276,54 @@ key_vault_id: /subscriptions//resourceGroups/< secret_id: https://.vault.azure.net/secrets// ``` -2.2.3. Credential Files +2.2.3. **Credential Files** (Available locally) The required credential files depend on the authentication method used to connect to the SAP system: -1. SSH Key Authentication: If connecting via SSH key, place the private key inside `WORKSPACE/SYSTEM/` and name the file "ssh_key.ppk". -1. Username and Password Authentication: If connecting using a username and password, create a password file by running the following command. It takes the username from hosts.yaml file. +1. **SSH Key Authentication**: If connecting via SSH key, place the private key inside `WORKSPACE/SYSTEM/` and name the file "ssh_key.ppk". +1. **Password Authentication**: If connecting using a username and password, create a password file by running the following command. It takes the username from hosts.yaml file. ```bash echo "password" > WORKSPACES/SYSTEM//password ``` +2.2.4. **Credential Files** (From Azure Key Vault) + +When using Azure Key Vault to store credentials, the framework retrieves authentication details directly from the key vault using the configured managed identity. + + **Authentication Methods:** + + 1. 
**SSH Key Authentication**: Store the private SSH key content in Azure Key Vault as a secret. + 2. **Password Authentication**: Store the password in Azure Key Vault as a secret. The username is taken from the `hosts.yaml` file. + + **Setup:** + + 1. Ensure the managed identity has "Key Vault Secrets User" role on the key vault. + + 2. Configure `key_vault_id` and `secret_id` parameters in `sap-parameters.yaml` as shown in section 2.2.2. + + **Important**: When using Key Vault authentication, do NOT create local credential files (`ssh_key.ppk` or `password` files). + + ### 3. Test Execution To execute the script, run following command: ```bash +# Run all the tests with default parameters ./scripts/sap_automation_qa.sh + +# Run specific test cases from HA_DB_HANA group +./scripts/sap_automation_qa.sh --test_groups=HA_DB_HANA --test_cases=[ha-config,primary-node-crash] + +# Run all enabled tests in HA_DB_HANA group +./scripts/sap_automation_qa.sh --test_groups=HA_DB_HANA + +# Run all enabled tests in HA_SCS group +./scripts/sap_automation_qa.sh --test_groups=HA_SCS + +# Run with verbose output +./scripts/sap_automation_qa.sh --test_groups=HA_DB_HANA --test_cases=[ha-config] -vv ``` ### 4. Viewing Test Results diff --git a/requirements.txt b/requirements.txt index 2e8902b2..8220ba41 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,18 +4,18 @@ # # pip-compile requirements.in # -ansible-compat==25.1.5 +ansible-compat==25.5.0 # via ansible-lint -ansible-core==2.17.11 +ansible-core==2.17.12 # via # -r requirements.in # ansible-compat # ansible-lint -ansible-lint==25.4.0 +ansible-lint==25.5.0 # via -r requirements.in ansible-runner==2.4.1 # via -r requirements.in -astroid==3.3.9 +astroid==3.3.10 # via pylint attrs==25.3.0 # via @@ -30,7 +30,7 @@ azure-core==1.34.0 # azure-mgmt-core # azure-storage-blob # azure-storage-queue -azure-identity==1.21.0 +azure-identity==1.23.0 # via # -r requirements.in # azure-kusto-data @@ -42,7 +42,7 @@ azure-kusto-ingest==5.0.3 # via -r requirements.in azure-mgmt-core==1.5.0 # via azure-mgmt-network -azure-mgmt-network==28.1.0 +azure-mgmt-network==29.0.0 # via -r requirements.in azure-storage-blob==12.23.0 # via @@ -64,15 +64,15 @@ cffi==1.17.1 # via cryptography charset-normalizer==3.4.2 # via requests -click==8.1.8 +click==8.2.1 # via # -r requirements.in # black -coverage[toml]==7.8.0 +coverage[toml]==7.9.0 # via # -r requirements.in # pytest-cov -cryptography==44.0.3 +cryptography==45.0.4 # via # ansible-core # azure-identity @@ -82,13 +82,13 @@ cryptography==44.0.3 # pyjwt dill==0.4.0 # via pylint -exceptiongroup==1.2.2 +exceptiongroup==1.3.0 # via pytest filelock==3.18.0 # via ansible-lint idna==3.10 # via requests -ijson==3.3.0 +ijson==3.4.0 # via azure-kusto-data importlib-metadata==8.7.0 # via ansible-lint @@ -107,7 +107,7 @@ jinja2==3.1.6 # ansible-core jmespath==1.0.1 # via -r requirements.in -jsonschema==4.23.0 +jsonschema==4.24.0 # via # ansible-compat # ansible-lint @@ -132,7 +132,7 @@ msal-extensions==1.3.1 # via azure-identity mypy-extensions==1.1.0 # via black -numpy==2.2.5 +numpy==2.2.6 # via # -r requirements.in # pandas @@ -144,7 +144,7 @@ packaging==25.0 # ansible-runner # black # pytest -pandas==2.2.3 +pandas==2.3.0 # via -r requirements.in pathspec==0.12.1 # via @@ -153,32 +153,34 @@ pathspec==0.12.1 # yamllint pexpect==4.9.0 # via ansible-runner -platformdirs==4.3.7 +platformdirs==4.3.8 # via # black # pylint -pluggy==1.5.0 +pluggy==1.6.0 # via pytest ptyprocess==0.7.0 # via pexpect pycparser==2.22 # via cffi pygments==2.19.1 - # 
via rich + # via + # pytest + # rich pyjwt[crypto]==2.10.1 # via # msal # pyjwt pylint==3.3.7 # via -r requirements.in -pytest==8.3.5 +pytest==8.4.0 # via # -r requirements.in # pytest-cov # pytest-mock -pytest-cov==6.1.1 +pytest-cov==6.2.0 # via -r requirements.in -pytest-mock==3.14.0 +pytest-mock==3.14.1 # via -r requirements.in python-daemon==3.1.2 # via ansible-runner @@ -201,7 +203,7 @@ referencing==0.36.2 # ansible-lint # jsonschema # jsonschema-specifications -requests==2.32.3 +requests==2.32.4 # via # -r requirements.in # azure-core @@ -211,11 +213,11 @@ resolvelib==1.0.1 # via ansible-core rich==14.0.0 # via -r requirements.in -rpds-py==0.24.0 +rpds-py==0.25.1 # via # jsonschema # referencing -ruamel-yaml==0.18.10 +ruamel-yaml==0.18.14 # via ansible-lint ruamel-yaml-clib==0.2.12 # via ruamel-yaml @@ -235,9 +237,9 @@ tomli==2.2.1 # coverage # pylint # pytest -tomlkit==0.13.2 +tomlkit==0.13.3 # via pylint -typing-extensions==4.13.2 +typing-extensions==4.14.0 # via # astroid # azure-core @@ -246,6 +248,7 @@ typing-extensions==4.13.2 # azure-storage-blob # azure-storage-queue # black + # exceptiongroup # referencing # rich tzdata==2025.2 @@ -256,5 +259,5 @@ wcmatch==10.0 # via ansible-lint yamllint==1.37.1 # via ansible-lint -zipp==3.21.0 +zipp==3.23.0 # via importlib-metadata diff --git a/scripts/sap_automation_qa.sh b/scripts/sap_automation_qa.sh index 2119e08e..e8fb538d 100755 --- a/scripts/sap_automation_qa.sh +++ b/scripts/sap_automation_qa.sh @@ -3,6 +3,7 @@ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. +set -euo pipefail # Activate the virtual environment source "$(realpath $(dirname $(realpath $0))/..)/.venv/bin/activate" @@ -21,6 +22,57 @@ NC='\033[0m' # Global variable to store the path of the temporary file. temp_file="" +# Parse command line arguments and extract verbose flags +# Sets global ANSIBLE_VERBOSE variable +parse_arguments() { + ANSIBLE_VERBOSE="" + + for arg in "$@"; do + case "$arg" in + -v|-vv|-vvv|-vvvv|-vvvvv|-vvvvvv) + ANSIBLE_VERBOSE="$arg" + ;; + --test_groups=*) + TEST_GROUPS="${arg#*=}" + ;; + --test_cases=*) + TEST_CASES="${arg#*=}" + # Remove brackets and convert to array + TEST_CASES="${TEST_CASES#[}" + TEST_CASES="${TEST_CASES%]}" + ;; + --extra-vars=*) + EXTRA_VARS="${arg#*=}" + ;; + -h|--help) + show_usage + exit 0 + ;; + esac + done +} + +show_usage() { + cat << EOF +Usage: $0 [OPTIONS] + +Options: + -v, -vv, -vvv, etc. Set Ansible verbosity level + --test_groups=GROUP Specify test group to run (e.g., HA_DB_HANA, HA_SCS) + --test_cases=[case1,case2] Specify specific test cases to run (comma-separated, in brackets) + --extra-vars=VAR Specify additional Ansible extra variables (e.g., --extra-vars='{"key":"value"}') + -h, --help Show this help message + +Examples: + $0 --test_groups=HA_DB_HANA --test_cases=[ha-config,primary-node-crash] + $0 --test_groups=HA_SCS + $0 --test_groups=HA_DB_HANA --test_cases=[ha-config,primary-node-crash] -vv + $0 --test_groups=HA_DB_HANA --test_cases=[ha-config,primary-node-crash] --extra-vars='{"key":"value"}' + +Configuration is read from vars.yaml file. +EOF +} + # Print logs with color based on severity. # :param severity: The severity level of the log (e.g., "INFO", "ERROR"). # :param message: The message to log. 
@@ -127,6 +179,40 @@ get_playbook_name() {
     esac
 }
 
+# Generate filtered test configuration as JSON for Ansible extra vars
+# :return: JSON string with filtered test configuration
+get_filtered_test_config() {
+    local input_api_file="${cmd_dir}/../src/vars/input-api.yaml"
+    local test_filter_script="${cmd_dir}/../src/module_utils/filter_tests.py"
+
+    if [[ ! -f "$test_filter_script" ]]; then
+        log "ERROR" "Test filter script not found: $test_filter_script" >&2
+        exit 1
+    fi
+
+    local group_arg="null"
+    local cases_arg="null"
+
+    if [[ -n "$TEST_GROUPS" ]]; then
+        group_arg="$TEST_GROUPS"
+    fi
+
+    if [[ -n "$TEST_CASES" ]]; then
+        cases_arg="$TEST_CASES"
+    fi
+
+    local filtered_config
+    filtered_config=$(python3 "$test_filter_script" "$input_api_file" "$group_arg" "$cases_arg" 2>&1)
+    local exit_code=$?
+
+    if [[ $exit_code -ne 0 ]]; then
+        log "ERROR" "Failed to filter test configuration: $filtered_config" >&2
+        exit 1
+    fi
+
+    echo "$filtered_config"
+}
+
 # Retrieve a secret from Azure Key Vault.
 # :param key_vault_id: The ID of the Key Vault.
 # :param secret_id: The ID of the secret in the Key Vault.
@@ -184,7 +270,7 @@ retrieve_secret_from_key_vault() {
     if [[ -f "$temp_file" ]]; then
         log "ERROR" "Temporary file already exists: $temp_file"
         exit 1
-    fi
+    fi
 
     # Create the temporary file and write the secret value to it
     echo "$secret_value" > "$temp_file"
@@ -210,6 +296,22 @@ run_ansible_playbook() {
     local auth_type=$4
     local system_config_folder=$5
 
+
+    local extra_vars=""
+    if [[ -n "$TEST_GROUPS" || -n "$TEST_CASES" ]]; then
+        local filtered_config
+        filtered_config=$(get_filtered_test_config)
+        if [[ -n "$filtered_config" ]]; then
+            extra_vars="--extra-vars '$filtered_config'"
+        fi
+    fi
+
+    if [[ -n "$EXTRA_VARS" ]]; then
+        log "INFO" "Using additional extra vars: $EXTRA_VARS"
+        escaped_extra_vars="${EXTRA_VARS//\'/\'\"\'\"\'}"
+        extra_vars+=" --extra-vars '$escaped_extra_vars'"
+    fi
+
     # Set local secret_id and key_vault_id if defined
     local secret_id=$(grep "^secret_id:" "$system_params" | awk '{split($0,a,": "); print a[2]}' | xargs || true)
     local key_vault_id=$(grep "^key_vault_id:" "$system_params" | awk '{split($0,a,": "); print a[2]}' | xargs || true)
@@ -232,13 +334,39 @@ run_ansible_playbook() {
         check_file_exists "$temp_file" \
            "Temporary SSH key file not found. Please check the Key Vault secret ID."
        command="ansible-playbook ${cmd_dir}/../src/$playbook_name.yml -i $system_hosts --private-key $temp_file \
-        -e @$VARS_FILE -e @$system_params -e '_workspace_directory=$system_config_folder'"
+        -e @$VARS_FILE -e @$system_params -e '_workspace_directory=$system_config_folder' $extra_vars"
     else
-        check_file_exists "${cmd_dir}/../WORKSPACES/SYSTEM/$SYSTEM_CONFIG_NAME/ssh_key.ppk" \
-            "ssh_key.ppk not found in WORKSPACES/SYSTEM/$SYSTEM_CONFIG_NAME directory."
- ssh_key="${cmd_dir}/../WORKSPACES/SYSTEM/$SYSTEM_CONFIG_NAME/ssh_key.ppk" + local ssh_key_dir="${cmd_dir}/../WORKSPACES/SYSTEM/$SYSTEM_CONFIG_NAME" + local ssh_key="" + local extensions=("ppk" "pem" "key" "private" "rsa" "ed25519" "ecdsa" "dsa" "") + + for ext in "${extensions[@]}"; do + if [[ -n "$ext" ]]; then + local key_file="${ssh_key_dir}/ssh_key.${ext}" + else + local key_file="${ssh_key_dir}/ssh_key" + fi + + if [[ -f "$key_file" ]]; then + ssh_key="$key_file" + log "INFO" "Found SSH key file: $ssh_key" + break + fi + done + + if [[ -z "$ssh_key" ]]; then + ssh_key=$(find "$ssh_key_dir" -name "*ssh_key*" -type f | head -n 1) + if [[ -n "$ssh_key" ]]; then + log "INFO" "Found SSH key file with pattern: $ssh_key" + fi + fi + + check_file_exists "$ssh_key" \ + "SSH key file not found in WORKSPACES/SYSTEM/$SYSTEM_CONFIG_NAME directory. Looked for files with patterns: ssh_key.*, *ssh_key*" + + chmod 600 "$ssh_key" command="ansible-playbook ${cmd_dir}/../src/$playbook_name.yml -i $system_hosts --private-key $ssh_key \ - -e @$VARS_FILE -e @$system_params -e '_workspace_directory=$system_config_folder'" + -e @$VARS_FILE -e @$system_params -e '_workspace_directory=$system_config_folder' $extra_vars" fi elif [[ "$auth_type" == "VMPASSWORD" ]]; then @@ -267,8 +395,12 @@ run_ansible_playbook() { exit 1 fi - log "INFO" "Running ansible playbook..." - log "INFO" "Executing: $command" + # Add verbosity if specified + if [[ -n "$ANSIBLE_VERBOSE" ]]; then + command+=" $ANSIBLE_VERBOSE" + fi + + log "INFO" "Running ansible playbook... Command: $command" eval $command return_code=$? log "INFO" "Ansible playbook execution completed with return code: $return_code" @@ -288,6 +420,16 @@ main() { log "INFO" "Activate the virtual environment..." set -e + # Parse command line arguments + parse_arguments "$@" + + if [[ -n "$TEST_GROUPS" ]]; then + log "INFO" "Test group specified: $TEST_GROUPS" + fi + if [[ -n "$TEST_CASES" ]]; then + log "INFO" "Test cases specified: $TEST_CASES" + fi + # Validate parameters validate_params @@ -314,4 +456,4 @@ main() { } # Execute the main function -main \ No newline at end of file +main "$@" diff --git a/scripts/setup.sh b/scripts/setup.sh index d113356f..a47a65d0 100755 --- a/scripts/setup.sh +++ b/scripts/setup.sh @@ -1,8 +1,9 @@ +#!/bin/bash + # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. -#!/bin/bash - +set -euo pipefail # Function to check if a command exists command_exists() { command -v "$1" &> /dev/null @@ -52,9 +53,9 @@ packages=("python3-pip" "ansible" "sshpass" "python3-venv") install_packages "${packages[@]}" -if [ ! -d "../.venv" ]; then +if [ ! -d ".venv" ]; then log "INFO" "Creating Python virtual environment..." - if python3 -m venv ../.venv; then + if python3 -m venv .venv; then log "INFO" "Python virtual environment created." else log "ERROR" "Failed to create Python virtual environment." @@ -64,7 +65,7 @@ fi # Ensure virtual environment is activated log "INFO" "Activating Python virtual environment..." -if source ../.venv/bin/activate; then +if source .venv/bin/activate; then log "INFO" "Python virtual environment activated." else log "ERROR" "Failed to activate Python virtual environment." 
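
The new CLI path above chains parse_arguments → get_filtered_test_config → run_ansible_playbook: the requested group and cases are rendered as JSON by src/module_utils/filter_tests.py and handed to ansible-playbook through --extra-vars. A rough standalone sketch of that filtering step (the inline config is a made-up stand-in for src/vars/input-api.yaml, not its real contents):

```python
# Illustrative sketch of the group/case filtering done by filter_tests.py;
# the config below is hypothetical, shaped like src/vars/input-api.yaml.
import json

config = {
    "test_groups": [
        {
            "name": "HA_DB_HANA",
            "test_cases": [
                {"task_name": "ha-config", "enabled": False},
                {"task_name": "primary-node-crash", "enabled": False},
            ],
        },
        {
            "name": "HA_SCS",
            "test_cases": [{"task_name": "ha-config", "enabled": True}],
        },
    ]
}


def filter_tests(cfg, group, cases):
    """Enable only the requested cases in `group`; disable all other groups."""
    for g in cfg["test_groups"]:
        if g["name"] == group:
            if cases:
                g["test_cases"] = [
                    dict(c, enabled=True)
                    for c in g["test_cases"]
                    if c["task_name"] in cases
                ]
        else:
            for c in g["test_cases"]:
                c["enabled"] = False
    return cfg


# Mirrors: ./scripts/sap_automation_qa.sh --test_groups=HA_DB_HANA --test_cases=[ha-config]
print(json.dumps(filter_tests(config, "HA_DB_HANA", ["ha-config"]), indent=2))
```

The printed structure — HA_DB_HANA reduced to an enabled ha-config, every HA_SCS case disabled — is the shape the playbooks receive as extra vars.
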
diff --git a/src/module_utils/commands.py b/src/module_utils/commands.py
index 3ff96d81..c3aa5fb9 100644
--- a/src/module_utils/commands.py
+++ b/src/module_utils/commands.py
@@ -9,6 +9,11 @@
 """
 from __future__ import absolute_import, division, print_function
 
+try:
+    from ansible.module_utils.enums import OperatingSystemFamily
+except ImportError:
+    from src.module_utils.enums import OperatingSystemFamily
+
 __metaclass__ = type
 
 DOCUMENTATION = r"""
@@ -21,8 +26,8 @@
 """
 
 STONITH_ACTION = {
-    "REDHAT": ["pcs", "property", "config", "stonith-action"],
-    "SUSE": ["crm", "configure", "get_property", "stonith-action"],
+    OperatingSystemFamily.REDHAT: ["pcs", "property", "config", "stonith-action"],
+    OperatingSystemFamily.SUSE: ["crm", "configure", "get_property", "stonith-action"],
 }
 
 AUTOMATED_REGISTER = [
@@ -48,8 +53,8 @@
 CONSTRAINTS = ["cibadmin", "--query", "--scope", "constraints"]
 
 RSC_CLEAR = {
-    "SUSE": lambda rsc: ["crm", "resource", "clear", rsc],
-    "REDHAT": lambda rsc: ["pcs", "resource", "clear", rsc],
+    OperatingSystemFamily.SUSE: lambda rsc: ["crm", "resource", "clear", rsc],
+    OperatingSystemFamily.REDHAT: lambda rsc: ["pcs", "resource", "clear", rsc],
 }
 
 CIB_ADMIN = lambda scope: ["cibadmin", "--query", "--scope", scope]
diff --git a/src/module_utils/enums.py b/src/module_utils/enums.py
new file mode 100644
index 00000000..37da3ccd
--- /dev/null
+++ b/src/module_utils/enums.py
@@ -0,0 +1,139 @@
+"""
+This module defines various enumerations and data classes used throughout the sap-automation-qa
+"""
+
+from enum import Enum
+from typing import Dict, Any, List, Optional
+
+
+class TelemetryDataDestination(Enum):
+    """
+    Enum for the destination of the telemetry data.
+    """
+
+    KUSTO = "azuredataexplorer"
+    LOG_ANALYTICS = "azureloganalytics"
+
+
+class TestStatus(Enum):
+    """
+    Enum for the status of the test case/step.
+    """
+
+    SUCCESS = "PASSED"
+    ERROR = "FAILED"
+    WARNING = "WARNING"
+    INFO = "INFO"
+    NOT_STARTED = "NOT_STARTED"
+
+
+class OperatingSystemFamily(Enum):
+    """
+    Enum for the operating system family.
+    """
+
+    REDHAT = "REDHAT"
+    SUSE = "SUSE"
+    DEBIAN = "DEBIAN"
+    WINDOWS = "WINDOWS"
+    UNKNOWN = "UNKNOWN"
+
+
+class HanaSRProvider(Enum):
+    """
+    Enum for the SAP HANA SR provider type.
+    """
+
+    SAPHANASR = "SAPHanaSR"
+    ANGI = "SAPHanaSR-angi"
+
+
+class Parameters:
+    """
+    This class stores the parameters for the test case.
+
+    :param category: The category of the parameter
+    :type category: str
+    :param id: Unique identifier for the parameter
+    :type id: str
+    :param name: Name of the parameter
+    :type name: str
+    :param value: Current value of the parameter
+    :type value: Any
+    :param expected_value: Expected value for validation
+    :type expected_value: Any
+    :param status: Current status of the parameter validation
+    :type status: str
+    """
+
+    def __init__(
+        self, category: str, id: str, name: str, value: Any, expected_value: Any, status: str
+    ):
+        self.category = category
+        self.id = id
+        self.name = name
+        self.value = value
+        self.expected_value = expected_value
+        self.status = status
+
+    def to_dict(self) -> Dict[str, Any]:
+        """
+        Converts the parameters to a dictionary.
+
+        :return: Dictionary containing the parameters
+        :rtype: Dict[str, Any]
+        """
+        return {
+            "category": self.category,
+            "id": self.id,
+            "name": self.name,
+            "value": self.value,
+            "expected_value": self.expected_value,
+            "status": self.status,
+        }
+
+
+class Result:
+    """
+    This class stores the result of the test case.
+
+    :param status: Current status of the test
+    :type status: str
+    :param message: Descriptive message about the result
+    :type message: str
+    :param details: List of detailed information
+    :type details: List[Any]
+    :param logs: List of log messages
+    :type logs: List[str]
+    :param changed: Whether the test caused any changes
+    :type changed: bool
+    """
+
+    def __init__(
+        self,
+        status: str = "",
+        message: str = "",
+        details: Optional[List[Any]] = None,
+        logs: Optional[List[str]] = None,
+        changed: bool = False,
+    ):
+        self.status = status if status is not None else TestStatus.NOT_STARTED.value
+        self.message = message
+        self.details = details if details is not None else []
+        self.logs = logs if logs is not None else []
+        self.changed = changed
+
+    def to_dict(self) -> Dict[str, Any]:
+        """
+        Converts the result to a dictionary.
+
+        :return: Dictionary containing the result
+        :rtype: Dict[str, Any]
+        """
+        return {
+            "status": self.status,
+            "message": self.message,
+            "details": self.details.copy(),
+            "logs": self.logs.copy(),
+            "changed": self.changed,
+        }
diff --git a/src/module_utils/filter_tests.py b/src/module_utils/filter_tests.py
new file mode 100644
index 00000000..24f323e4
--- /dev/null
+++ b/src/module_utils/filter_tests.py
@@ -0,0 +1,134 @@
+#!/usr/bin/env python3
+
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+
+"""
+Test Filter Module
+
+This module provides functionality to filter test groups and test cases
+from the input-api.yaml configuration based on command line arguments.
+"""
+
+import sys
+import json
+from typing import Dict, List, Optional, Any
+import yaml
+
+
+class TestFilter:
+    """Filter test configuration based on specified groups and cases."""
+
+    def __init__(self, input_file: str):
+        """
+        Initialize the TestFilter with the input YAML file.
+
+        :param input_file: Path to the input YAML file
+        :type input_file: str
+        """
+        self.input_file = input_file
+        self.config = self._load_config()
+
+    def _load_config(self) -> Dict[str, Any]:
+        """
+        Load the configuration from the input YAML file.
+
+        :return: Loaded configuration
+        :rtype: Dict[str, Any]
+        """
+        try:
+            with open(self.input_file, "r", encoding="utf-8") as f:
+                return yaml.safe_load(f)
+        except FileNotFoundError:
+            print(f"Error: Configuration file {self.input_file} not found", file=sys.stderr)
+            sys.exit(1)
+        except yaml.YAMLError as e:
+            print(f"Error parsing YAML file {self.input_file}: {e}", file=sys.stderr)
+            sys.exit(1)
+
+    def filter_tests(
+        self, test_group: Optional[str] = None, test_cases: Optional[List[str]] = None
+    ) -> str:
+        """
+        Filter the test configuration based on the specified test group and test cases.
+ + :param test_group: Name of the test group to filter, defaults to None + :type test_group: Optional[str], optional + :param test_cases: List of test case task names to include, defaults to None + :type test_cases: Optional[List[str]], optional + :return: JSON string representation of the filtered test configuration + :rtype: str + """ + filtered_config = self.config.copy() + + if test_group or test_cases: + for group in filtered_config["test_groups"]: + if test_group and group["name"] == test_group: + if test_cases: + filtered_cases = [] + for case in group["test_cases"]: + if case["task_name"] in test_cases: + case["enabled"] = True + filtered_cases.append(case) + group["test_cases"] = filtered_cases + elif test_group and group["name"] != test_group: + for case in group["test_cases"]: + case["enabled"] = False + elif test_cases and not test_group: + for case in group["test_cases"]: + if case["task_name"] in test_cases: + case["enabled"] = True + else: + case["enabled"] = False + + return json.dumps(filtered_config, indent=2) + + def get_ansible_vars( + self, test_group: Optional[str] = None, test_cases: Optional[List[str]] = None + ) -> str: + """ + Get Ansible variables from the filtered test configuration. + + :param test_group: Name of the test group to filter, defaults to None + :type test_group: Optional[str], optional + :param test_cases: List of test case task names to include, defaults to None + :type test_cases: Optional[List[str]], optional + :return: JSON string representation of the Ansible variables + :rtype: str + """ + filtered_json = self.filter_tests(test_group, test_cases) + filtered_config = json.loads(filtered_json) + return json.dumps({"test_groups": filtered_config["test_groups"]}) + + +def main(): + """ + Command line interface for the test filter. 
+    """
+    if len(sys.argv) < 2:
+        print(
+            "Usage: python filter_tests.py <input_file> [test_group] [test_cases...]",
+            file=sys.stderr,
+        )
+        print(
+            "Example: "
+            + "python filter_tests.py input-api.yaml HA_DB_HANA ha-config,primary-node-crash",
+            file=sys.stderr,
+        )
+        sys.exit(1)
+
+    input_file = sys.argv[1]
+    test_group = sys.argv[2] if len(sys.argv) > 2 and sys.argv[2] != "null" else None
+    test_cases_str = sys.argv[3] if len(sys.argv) > 3 and sys.argv[3] != "null" else None
+
+    test_cases = None
+    if test_cases_str:
+        test_cases = [case.strip() for case in test_cases_str.split(",")]
+
+    filter_obj = TestFilter(input_file)
+    result = filter_obj.get_ansible_vars(test_group, test_cases)
+    print(result)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/src/module_utils/get_cluster_status.py b/src/module_utils/get_cluster_status.py
index 42e09cc2..518bf42e 100644
--- a/src/module_utils/get_cluster_status.py
+++ b/src/module_utils/get_cluster_status.py
@@ -6,19 +6,22 @@
 """
 
 import logging
+from abc import abstractmethod
 import xml.etree.ElementTree as ET
 from datetime import datetime
 from typing import Dict, Any
 
 try:
-    from ansible.module_utils.sap_automation_qa import SapAutomationQA, TestStatus
+    from ansible.module_utils.sap_automation_qa import SapAutomationQA
+    from ansible.module_utils.enums import TestStatus, OperatingSystemFamily
     from ansible.module_utils.commands import (
         STONITH_ACTION,
         PACEMAKER_STATUS,
         CLUSTER_STATUS,
     )
 except ImportError:
-    from src.module_utils.sap_automation_qa import SapAutomationQA, TestStatus
+    from src.module_utils.sap_automation_qa import SapAutomationQA
+    from src.module_utils.enums import TestStatus, OperatingSystemFamily
     from src.module_utils.commands import (
         STONITH_ACTION,
         PACEMAKER_STATUS,
@@ -31,7 +34,7 @@ class BaseClusterStatusChecker(SapAutomationQA):
     Base class to check the status of a pacemaker cluster.
     """
 
-    def __init__(self, ansible_os_family: str = ""):
+    def __init__(self, ansible_os_family: OperatingSystemFamily):
         super().__init__()
         self.ansible_os_family = ansible_os_family
         self.result.update(
@@ -44,6 +47,43 @@ def __init__(self, ansible_os_family: str = ""):
             }
         )
 
+    @abstractmethod
+    def _process_node_attributes(self, cluster_status_xml: ET.Element) -> Dict[str, Any]:
+        """
+        Abstract method to process node attributes.
+
+        :param cluster_status_xml: XML element containing the cluster status.
+        :type cluster_status_xml: ET.Element
+        :raises NotImplementedError: If the method is not implemented in a child class.
+        :return: Dictionary with node attributes.
+        :rtype: Dict[str, Any]
+        """
+        raise NotImplementedError("Child classes must implement this method")
+
+    @abstractmethod
+    def _is_cluster_ready(self) -> bool:
+        """
+        Abstract method to check if the cluster is ready.
+        To be implemented by child classes.
+
+        :raises NotImplementedError: If the method is not implemented in a child class.
+        :return: True if the cluster is ready, False otherwise.
+        :rtype: bool
+        """
+        raise NotImplementedError("Child classes must implement this method")
+
+    @abstractmethod
+    def _is_cluster_stable(self) -> bool:
+        """
+        Abstract method to check if the cluster is in a stable state.
+        To be implemented by child classes.
+
+        :raises NotImplementedError: If the method is not implemented in a child class.
+        :return: True if the cluster is stable, False otherwise.
+        :rtype: bool
+        """
+        raise NotImplementedError("Child classes must implement this method")
+
     def _get_stonith_action(self) -> None:
         """
         Retrieves the stonith action from the system.
@@ -63,7 +103,7 @@ def _get_stonith_action(self) -> None: except Exception as ex: self.log(logging.WARNING, f"Failed to get stonith action: {str(ex)}") - def _validate_cluster_basic_status(self, cluster_status_xml: ET.Element): + def _validate_cluster_basic_status(self, cluster_status_xml: ET.Element) -> None: """ Validate the basic status of the cluster. @@ -76,28 +116,32 @@ def _validate_cluster_basic_status(self, cluster_status_xml: ET.Element): self.result["pacemaker_status"] = "stopped" self.log(logging.INFO, f"Pacemaker status: {self.result['pacemaker_status']}") - if int(cluster_status_xml.find("summary").find("nodes_configured").attrib["number"]) < 2: - self.result["message"] = "Pacemaker cluster isn't stable (insufficient nodes)" + summary = cluster_status_xml.find("summary") + if summary is None: + self.log(logging.ERROR, "Cluster status summary not found in XML") + return + + nodes_configured = summary.find("nodes_configured") + if nodes_configured is None: + self.log(logging.ERROR, "Nodes configured not found in cluster status summary") + return + + if int(nodes_configured.attrib.get("number", 0)) < 2: + self.result["message"] = ( + "Pacemaker cluster isn't stable (insufficient nodes configured)" + ) self.log(logging.WARNING, self.result["message"]) nodes = cluster_status_xml.find("nodes") + if nodes is None: + self.log(logging.ERROR, "Nodes not found in cluster status XML") + return + for node in nodes: if node.attrib["online"] != "true": self.result["message"] = f"Node {node.attrib['name']} is not online" self.log(logging.WARNING, self.result["message"]) - def _process_node_attributes(self, cluster_status_xml: ET.Element) -> Dict[str, Any]: - """ - Abstract method to process node attributes. - - :param node_attributes: XML element containing node attributes. - :type node_attributes: ET.Element - :raises NotImplementedError: If the method is not implemented in a child class. - :return: Dictionary with node attributes. - :rtype: Dict[str, Any] - """ - raise NotImplementedError("Child classes must implement this method") - def run(self) -> Dict[str, str]: """ Run the cluster status check. @@ -128,25 +172,3 @@ def run(self) -> Dict[str, str]: self.result["status"] = TestStatus.SUCCESS.value self.log(logging.INFO, "Cluster status check completed") return self.result - - def _is_cluster_ready(self) -> bool: - """ - Abstract method to check if the cluster is ready. - To be implemented by child classes. - - :raises NotImplementedError: If the method is not implemented in a child class. - :return: True if the cluster is ready, False otherwise. - :rtype: bool - """ - raise NotImplementedError("Child classes must implement this method") - - def _is_cluster_stable(self) -> bool: - """ - Abstract method to check if the cluster is in a stable state. - To be implemented by child classes. - - :raises NotImplementedError: If the method is not implemented in a child class. - :return: True if the cluster is ready, False otherwise. 
- :rtype: bool - """ - raise NotImplementedError("Child classes must implement this method") diff --git a/src/module_utils/sap_automation_qa.py b/src/module_utils/sap_automation_qa.py index 7b2fa9c7..41fb4091 100644 --- a/src/module_utils/sap_automation_qa.py +++ b/src/module_utils/sap_automation_qa.py @@ -4,63 +4,16 @@ """ from abc import ABC -from enum import Enum import sys import logging import subprocess from typing import Optional, Dict, Any import xml.etree.ElementTree as ET - -class TelemetryDataDestination(Enum): - """ - Enum for the destination of the telemetry data. - """ - - KUSTO = "azuredataexplorer" - LOG_ANALYTICS = "azureloganalytics" - - -class TestStatus(Enum): - """ - Enum for the status of the test case/step. - """ - - SUCCESS = "PASSED" - ERROR = "FAILED" - WARNING = "WARNING" - INFO = "INFO" - NOT_STARTED = "NOT_STARTED" - - -class Parameters: - """ - This class is used to store the parameters for the test case - """ - - def __init__(self, category, id, name, value, expected_value, status): - self.category = category - self.id = id - self.name = name - self.value = value - self.expected_value = expected_value - self.status = status - - def to_dict(self) -> Dict[str, Any]: - """ - This method is used to convert the parameters to a dictionary - - :return: Dictionary containing the parameters - :rtype: Dict[str, Any] - """ - return { - "category": self.category, - "id": self.id, - "name": self.name, - "value": self.value, - "expected_value": self.expected_value, - "status": self.status, - } +try: + from ansible.module_utils.enums import Result, TestStatus +except ImportError: + from src.module_utils.enums import Result, TestStatus class SapAutomationQA(ABC): @@ -71,13 +24,7 @@ class SapAutomationQA(ABC): def __init__(self): self.logger = self.setup_logger() - self.result = { - "status": TestStatus.NOT_STARTED.value, - "message": "", - "details": [], - "logs": [], - "changed": False, - } + self.result = Result().to_dict() def setup_logger(self) -> logging.Logger: """ @@ -107,7 +54,7 @@ def log(self, level: int, message: str): message.replace("\n", " ") self.result["logs"].append(message) - def handle_error(self, exception: Exception, stderr: str = None): + def handle_error(self, exception: Exception, stderr: str = ""): """ Handles command execution errors by logging and updating the result dictionary. @@ -125,7 +72,7 @@ def handle_error(self, exception: Exception, stderr: str = None): self.result["message"] = error_message self.result["logs"].append(error_message) - def execute_command_subprocess(self, command: str, shell_command: bool = False) -> str: + def execute_command_subprocess(self, command: Any, shell_command: bool = False) -> str: """ Executes a shell command using subprocess with a timeout and logs output or errors. 
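
With TestStatus, Parameters, and Result moved into module_utils/enums.py, SapAutomationQA.__init__ now seeds its result dictionary from Result().to_dict(), and modules mutate that dict as they run. A minimal sketch of that contract, with the classes re-declared (and trimmed) here so the snippet runs without the repository on the path:

```python
# Trimmed stand-ins for src/module_utils/enums.py; enum values match the patch.
from enum import Enum
from typing import Any, Dict


class TestStatus(Enum):
    SUCCESS = "PASSED"
    ERROR = "FAILED"
    NOT_STARTED = "NOT_STARTED"


class Result:
    def __init__(self) -> None:
        self.status = TestStatus.NOT_STARTED.value
        self.message = ""
        self.details: list = []
        self.logs: list = []
        self.changed = False

    def to_dict(self) -> Dict[str, Any]:
        # Copies keep the returned dict independent of this instance's lists.
        return {
            "status": self.status,
            "message": self.message,
            "details": self.details.copy(),
            "logs": self.logs.copy(),
            "changed": self.changed,
        }


# What SapAutomationQA.__init__ does, followed by a typical module update:
result = Result().to_dict()
result["status"] = TestStatus.SUCCESS.value
result["logs"].append("Cluster status check completed")
print(result)
```
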
diff --git a/src/modules/check_indexserver.py b/src/modules/check_indexserver.py index 7f52a235..a3106fbf 100644 --- a/src/modules/check_indexserver.py +++ b/src/modules/check_indexserver.py @@ -7,11 +7,14 @@ import logging from ansible.module_utils.basic import AnsibleModule +from ansible.module_utils.facts.compat import ansible_facts try: - from ansible.module_utils.sap_automation_qa import SapAutomationQA, TestStatus + from ansible.module_utils.sap_automation_qa import SapAutomationQA + from ansible.module_utils.enums import TestStatus, OperatingSystemFamily except ImportError: - from src.module_utils.sap_automation_qa import SapAutomationQA, TestStatus + from src.module_utils.sap_automation_qa import SapAutomationQA + from src.module_utils.enums import TestStatus, OperatingSystemFamily DOCUMENTATION = r""" --- @@ -27,11 +30,6 @@ - SAP HANA database SID type: str required: true - ansible_os_family: - description: - - Operating system distribution (e.g., 'redhat' or 'suse') - type: str - required: true author: - Microsoft Corporation notes: @@ -45,7 +43,6 @@ - name: Check if SAP HANA indexserver is configured check_indexserver: database_sid: "HDB" - ansible_os_family: "{{ ansible_os_family|lower }}" register: indexserver_result - name: Display indexserver check results @@ -92,7 +89,7 @@ class IndexServerCheck(SapAutomationQA): :type os_distribution: str """ - def __init__(self, database_sid: str, os_distribution: str): + def __init__(self, database_sid: str, os_distribution: OperatingSystemFamily): super().__init__() self.database_sid = database_sid self.os_distribution = os_distribution @@ -102,7 +99,7 @@ def check_indexserver(self) -> None: Checks if the indexserver is configured. """ expected_properties = { - "redhat": [ + OperatingSystemFamily.REDHAT: [ { "[ha_dr_provider_chksrv]": { "provider": "ChkSrv", @@ -116,7 +113,7 @@ def check_indexserver(self) -> None: } }, ], - "suse": [ + OperatingSystemFamily.SUSE: [ { "[ha_dr_provider_suschksrv]": { "provider": "susChkSrv", @@ -129,6 +126,12 @@ def check_indexserver(self) -> None: "path": "/hana/shared/myHooks", } }, + { + "[ha_dr_provider_suschksrv]": { + "provider": "susChkSrv", + "path": "/usr/share/SAPHanaSR-angi", + } + }, ], } @@ -211,14 +214,18 @@ def main(): module = AnsibleModule( argument_spec=dict( database_sid=dict(type="str", required=True), - ansible_os_family=dict(type="str", required=True), + filter=dict(type="str", required=False, default="os_family"), ) ) database_sid = module.params["database_sid"] - os_distribution = module.params["ansible_os_family"] - index_server_check = IndexServerCheck(database_sid, os_distribution) + index_server_check = IndexServerCheck( + database_sid=database_sid, + os_distribution=OperatingSystemFamily( + str(ansible_facts(module).get("os_family", "UNKNOWN")).upper() + ), + ) index_server_check.check_indexserver() module.exit_json(**index_server_check.get_result()) diff --git a/src/modules/filesystem_freeze.py b/src/modules/filesystem_freeze.py index 17bbcd0a..86e28743 100644 --- a/src/modules/filesystem_freeze.py +++ b/src/modules/filesystem_freeze.py @@ -129,7 +129,7 @@ def _find_filesystem(self) -> Tuple[str, str]: return parts[0], "/hana/shared" except FileNotFoundError as ex: self.handle_error(ex) - return None, None + return "", "" def run(self) -> Dict[str, Any]: """ diff --git a/src/modules/get_azure_lb.py b/src/modules/get_azure_lb.py index 14ff0df0..73c3f159 100644 --- a/src/modules/get_azure_lb.py +++ b/src/modules/get_azure_lb.py @@ -13,17 +13,11 @@ from 
ansible.module_utils.basic import AnsibleModule try: - from ansible.module_utils.sap_automation_qa import ( - SapAutomationQA, - TestStatus, - Parameters, - ) + from ansible.module_utils.sap_automation_qa import SapAutomationQA + from ansible.module_utils.enums import TestStatus, Parameters except ImportError: - from src.module_utils.sap_automation_qa import ( - SapAutomationQA, - TestStatus, - Parameters, - ) + from src.module_utils.sap_automation_qa import SapAutomationQA + from src.module_utils.enums import TestStatus, Parameters DOCUMENTATION = r""" --- @@ -174,7 +168,7 @@ def __init__(self, module_params: Dict): self.network_client = None self.constants = module_params["constants"].get("AZURE_LOADBALANCER", {}) - def _create_network_client(self): + def _create_network_client(self) -> bool: """ Create the network client object. """ @@ -188,11 +182,13 @@ def _create_network_client(self): self.network_client = NetworkManagementClient( self.credential, self.module_params["subscription_id"] ) + return True except Exception as ex: self.handle_error(ex) self.result["message"] += ( " Failed to authenticate to Azure to read the Load " + f"Balancer Details. {ex} \n" ) + return False def get_load_balancers(self) -> list: """ @@ -202,23 +198,24 @@ def get_load_balancers(self) -> list: :rtype: list """ try: + if self.network_client is None: + return [] + load_balancers = self.network_client.load_balancers.list_all() return [ lb.as_dict() for lb in load_balancers - if lb.location.lower() == self.module_params["region"].lower() + if str(lb.location).lower() == self.module_params["region"].lower() ] except Exception as ex: self.handle_error(ex) self.result["message"] += f" Failed to get load balancers. {ex} \n" + return [] - def get_load_balancers_details(self) -> dict: + def get_load_balancers_details(self) -> None: """ Get the details of the load balancers in a specific resource group. - - :return: Dictionary containing the result of the test case. - :rtype: dict """ self._create_network_client() diff --git a/src/modules/get_cluster_status_db.py b/src/modules/get_cluster_status_db.py index 22462283..2928890c 100644 --- a/src/modules/get_cluster_status_db.py +++ b/src/modules/get_cluster_status_db.py @@ -5,16 +5,20 @@ Python script to get and validate the status of a HANA cluster. """ +import logging import xml.etree.ElementTree as ET from typing import Dict, Any from ansible.module_utils.basic import AnsibleModule +from ansible.module_utils.facts.compat import ansible_facts try: from ansible.module_utils.get_cluster_status import BaseClusterStatusChecker + from ansible.module_utils.enums import OperatingSystemFamily, HanaSRProvider from ansible.module_utils.commands import AUTOMATED_REGISTER except ImportError: from src.module_utils.get_cluster_status import BaseClusterStatusChecker from src.module_utils.commands import AUTOMATED_REGISTER + from src.module_utils.enums import OperatingSystemFamily, HanaSRProvider DOCUMENTATION = r""" @@ -37,11 +41,16 @@ - SAP HANA database SID type: str required: true - ansible_os_family: + saphanasr_provider: description: - - Operating system family (redhat, suse, etc.) 
+ - The SAP HANA system replication provider type type: str - required: false + required: true + db_instance_number: + description: + - The instance number of the SAP HANA database + type: str + required: true author: - Microsoft Corporation notes: @@ -58,7 +67,7 @@ get_cluster_status_db: operation_step: "check_cluster" database_sid: "HDB" - ansible_os_family: "{{ ansible_os_family|lower }}" + saphanasr_provider: "SAPHanaSR" register: cluster_result - name: Display cluster status @@ -131,9 +140,17 @@ class HanaClusterStatusChecker(BaseClusterStatusChecker): Class to check the status of a pacemaker cluster in a SAP HANA environment. """ - def __init__(self, database_sid: str, ansible_os_family: str = ""): + def __init__( + self, + database_sid: str, + db_instance_number: str, + saphanasr_provider: HanaSRProvider, + ansible_os_family: OperatingSystemFamily, + ): super().__init__(ansible_os_family) self.database_sid = database_sid + self.saphanasr_provider = saphanasr_provider + self.db_instance_number = db_instance_number self.result.update( { "primary_node": "", @@ -173,44 +190,61 @@ def _process_node_attributes(self, cluster_status_xml: ET.Element) -> Dict[str, "primary_site_name": "", } node_attributes = cluster_status_xml.find("node_attributes") - attribute_map = { - f"hana_{self.database_sid}_op_mode": "operation_mode", - f"hana_{self.database_sid}_srmode": "replication_mode", + if node_attributes is None: + self.log( + logging.ERROR, + "No node attributes found in the cluster status XML.", + ) + return result + + providers = { + HanaSRProvider.SAPHANASR: { + "clone_attr": f"hana_{self.database_sid}_clone_state", + "sync_attr": f"hana_{self.database_sid}_sync_state", + "primary": {"clone": "PROMOTED", "sync": "PRIM"}, + "secondary": {"clone": "DEMOTED", "sync": "SOK"}, + }, + HanaSRProvider.ANGI: { + "clone_attr": f"hana_{self.database_sid}_clone_state", + "sync_attr": f"master-rsc_SAPHanaCon_{self.database_sid.upper()}" + + f"_HDB{self.db_instance_number}", + "primary": {"clone": "PROMOTED", "sync": "150"}, + "secondary": {"clone": "DEMOTED", "sync": "100"}, + }, } + provider_config = providers.get( + self.saphanasr_provider, providers[HanaSRProvider.SAPHANASR] + ) for node in node_attributes: node_name = node.attrib["name"] - node_states = {} - node_attributes_dict = {} - - for attribute in node: - attr_name = attribute.attrib["name"] - attr_value = attribute.attrib["value"] - node_attributes_dict[attr_name] = attr_value - - if attr_name in attribute_map: - result[attribute_map[attr_name]] = attr_value - - if attr_name == f"hana_{self.database_sid}_clone_state": - node_states["clone_state"] = attr_value - elif attr_name == f"hana_{self.database_sid}_sync_state": - node_states["sync_state"] = attr_value - + attrs = {attr.attrib["name"]: attr.attrib["value"] for attr in node} + result["operation_mode"] = attrs.get( + f"hana_{self.database_sid}_op_mode", result["operation_mode"] + ) + result["replication_mode"] = attrs.get( + f"hana_{self.database_sid}_srmode", result["replication_mode"] + ) + clone_state = attrs.get(provider_config["clone_attr"], "") + sync_state = attrs.get(provider_config["sync_attr"], "") if ( - node_states.get("clone_state") == "PROMOTED" - and node_states.get("sync_state") == "PRIM" + clone_state == provider_config["primary"]["clone"] + and sync_state == provider_config["primary"]["sync"] ): - result["primary_node"] = node_name - result["cluster_status"]["primary"] = node_attributes_dict - result["primary_site_name"] = node_attributes_dict.get( - 
f"hana_{self.database_sid}_site", "" + result.update( + { + "primary_node": node_name, + "primary_site_name": attrs.get(f"hana_{self.database_sid}_site", ""), + } ) + result["cluster_status"]["primary"] = attrs + elif ( - node_states.get("clone_state") == "DEMOTED" - and node_states.get("sync_state") == "SOK" + clone_state == provider_config["secondary"]["clone"] + and sync_state == provider_config["secondary"]["sync"] ): result["secondary_node"] = node_name - result["cluster_status"]["secondary"] = node_attributes_dict + result["cluster_status"]["secondary"] = attrs self.result.update(result) return result @@ -252,14 +286,20 @@ def run_module() -> None: module_args = dict( operation_step=dict(type="str", required=True), database_sid=dict(type="str", required=True), - ansible_os_family=dict(type="str", required=False), + saphanasr_provider=dict(type="str", required=True), + db_instance_number=dict(type="str", required=True), + filter=dict(type="str", required=False, default="os_family"), ) module = AnsibleModule(argument_spec=module_args, supports_check_mode=True) checker = HanaClusterStatusChecker( database_sid=module.params["database_sid"], - ansible_os_family=module.params["ansible_os_family"], + saphanasr_provider=HanaSRProvider(module.params["saphanasr_provider"]), + ansible_os_family=OperatingSystemFamily( + str(ansible_facts(module).get("os_family", "UNKNOWN")).upper() + ), + db_instance_number=module.params["db_instance_number"], ) checker.run() diff --git a/src/modules/get_cluster_status_scs.py b/src/modules/get_cluster_status_scs.py index 76c2cf75..3bf5a966 100644 --- a/src/modules/get_cluster_status_scs.py +++ b/src/modules/get_cluster_status_scs.py @@ -9,15 +9,18 @@ import xml.etree.ElementTree as ET from typing import Dict, Any from ansible.module_utils.basic import AnsibleModule +from ansible.module_utils.facts.compat import ansible_facts try: from ansible.module_utils.get_cluster_status import BaseClusterStatusChecker from ansible.module_utils.commands import CIB_ADMIN + from ansible.module_utils.enums import OperatingSystemFamily except ImportError: from src.module_utils.get_cluster_status import BaseClusterStatusChecker from src.module_utils.commands import ( CIB_ADMIN, ) + from src.module_utils.enums import OperatingSystemFamily DOCUMENTATION = r""" @@ -36,12 +39,6 @@ - Used to identify the specific ASCS and ERS resources. type: str required: true - ansible_os_family: - description: - - Operating system family (e.g., redhat, suse). - - Used to determine OS-specific commands and configurations. 
- type: str - required: false author: - Microsoft Corporation notes: @@ -58,7 +55,6 @@ - name: Check SAP SCS cluster status get_cluster_status_scs: sap_sid: "S4D" - ansible_os_family: "{{ ansible_os_family|lower }}" register: cluster_result - name: Display SCS cluster status @@ -118,7 +114,7 @@ class SCSClusterStatusChecker(BaseClusterStatusChecker): def __init__( self, sap_sid: str, - ansible_os_family: str = "", + ansible_os_family: OperatingSystemFamily, ): super().__init__(ansible_os_family) self.sap_sid = sap_sid @@ -285,14 +281,15 @@ def run_module() -> None: """ module_args = dict( sap_sid=dict(type="str", required=True), - ansible_os_family=dict(type="str", required=False), + filter=dict(type="str", required=False, default="os_family"), ) module = AnsibleModule(argument_spec=module_args, supports_check_mode=True) + ansible_os_family = str(ansible_facts(module).get("os_family", "UNKNOWN")).upper() checker = SCSClusterStatusChecker( sap_sid=module.params["sap_sid"], - ansible_os_family=module.params["ansible_os_family"], + ansible_os_family=OperatingSystemFamily(ansible_os_family), ) checker.run() diff --git a/src/modules/get_pcmk_properties_db.py b/src/modules/get_pcmk_properties_db.py index 5a67e85a..f044b302 100644 --- a/src/modules/get_pcmk_properties_db.py +++ b/src/modules/get_pcmk_properties_db.py @@ -11,21 +11,22 @@ HAClusterValidator: Main validator class for cluster configurations. """ +import logging from ansible.module_utils.basic import AnsibleModule +from ansible.module_utils.facts.compat import ansible_facts try: - from ansible.module_utils.sap_automation_qa import ( - SapAutomationQA, - TestStatus, + from ansible.module_utils.sap_automation_qa import SapAutomationQA + from ansible.module_utils.enums import ( + OperatingSystemFamily, Parameters, + TestStatus, + HanaSRProvider, ) from ansible.module_utils.commands import CIB_ADMIN except ImportError: - from src.module_utils.sap_automation_qa import ( - SapAutomationQA, - TestStatus, - Parameters, - ) + from src.module_utils.sap_automation_qa import SapAutomationQA + from src.module_utils.enums import OperatingSystemFamily, Parameters, TestStatus, HanaSRProvider from src.module_utils.commands import CIB_ADMIN DOCUMENTATION = r""" @@ -48,11 +49,6 @@ - SAP HANA instance number type: str required: true - ansible_os_family: - description: - - Operating system family (redhat, suse, etc.) 
- type: str - required: true virtual_machine_name: description: - Name of the virtual machine @@ -73,6 +69,11 @@ - Dictionary of constants for validation type: dict required: true + saphanasr_provider: + description: + - SAP HANA SR provider type (e.g., SAPHanaSR, SAPHanaSR-angi) + type: str + required: true author: - Microsoft Corporation notes: @@ -89,7 +90,6 @@ get_pcmk_properties_db: sid: "HDB" instance_number: "00" - ansible_os_family: "{{ ansible_os_family|lower }}" virtual_machine_name: "{{ ansible_hostname }}" fencing_mechanism: "sbd" os_version: "{{ ansible_distribution_version }}" @@ -180,27 +180,31 @@ class HAClusterValidator(SapAutomationQA): "sbd_stonith": ".//primitive[@type='external/sbd']", "fence_agent": ".//primitive[@type='fence_azure_arm']", "topology": ".//clone/primitive[@type='SAPHanaTopology']", + "angi_topology": ".//clone/primitive[@type='SAPHanaTopology']", "topology_meta": ".//clone/meta_attributes", "hana": ".//master/primitive[@type='SAPHana']", "hana_meta": ".//master/meta_attributes", "ipaddr": ".//primitive[@type='IPaddr2']", "filesystem": ".//primitive[@type='Filesystem']", "azurelb": ".//primitive[@type='azure-lb']", + "angi_filesystem": ".//primitive[@type='SAPHanaFilesystem']", + "angi_hana": ".//primitive[@type='SAPHanaController']", } def __init__( self, - os_type, - os_version, - sid, - instance_number, - fencing_mechanism, - virtual_machine_name, - constants, + os_type: OperatingSystemFamily, + os_version: str, + sid: str, + instance_number: str, + fencing_mechanism: str, + virtual_machine_name: str, + constants: dict, + saphanasr_provider: HanaSRProvider, category=None, ): super().__init__() - self.os_type = os_type + self.os_type = os_type.value.upper() self.os_version = os_version self.category = category self.sid = sid @@ -208,6 +212,7 @@ def __init__( self.fencing_mechanism = fencing_mechanism self.virtual_machine_name = virtual_machine_name self.constants = constants + self.saphanasr_provider = saphanasr_provider self.parse_ha_cluster_config() def _get_expected_value(self, category, name): @@ -396,16 +401,22 @@ def _parse_global_ini_parameters(self): :rtype: list """ parameters = [] - global_ini_defaults = self.constants["GLOBAL_INI"].get(self.os_type, {}) - + global_ini_defaults = ( + self.constants["GLOBAL_INI"] + .get(self.os_type, {}) + .get(self.saphanasr_provider.value, {}) + ) with open( f"/usr/sap/{self.sid}/SYS/global/hdb/custom/config/global.ini", "r", encoding="utf-8", ) as file: global_ini_content = file.read().splitlines() - - section_start = global_ini_content.index("[ha_dr_provider_SAPHanaSR]") + section_start = ( + global_ini_content.index("[ha_dr_provider_sushanasr]") + if self.saphanasr_provider == HanaSRProvider.ANGI + else global_ini_content.index("[ha_dr_provider_SAPHanaSR]") + ) properties_slice = global_ini_content[section_start + 1 : section_start + 4] global_ini_properties = { @@ -420,6 +431,10 @@ def _parse_global_ini_parameters(self): if isinstance(expected_value, list): if value in expected_value: expected_value = value + self.log( + logging.INFO, + f"param_name: {param_name}, value: {value}, expected_value: {expected_value}", + ) parameters.append( self._create_parameter( category="global_ini", @@ -569,7 +584,12 @@ def parse_ha_cluster_config(self): elif self.category == "resources": try: - for sub_category, xpath in self.RESOURCE_CATEGORIES.items(): + resource_categories = self.RESOURCE_CATEGORIES.copy() + if self.saphanasr_provider == HanaSRProvider.ANGI: + resource_categories.pop("topology", None) + else: + 
resource_categories.pop("angi_topology", None) + for sub_category, xpath in resource_categories.items(): elements = root.findall(xpath) for element in elements: parameters.extend(self._parse_resource(element, sub_category)) @@ -620,22 +640,26 @@ def main() -> None: argument_spec=dict( sid=dict(type="str"), instance_number=dict(type="str"), - ansible_os_family=dict(type="str"), virtual_machine_name=dict(type="str"), fencing_mechanism=dict(type="str"), os_version=dict(type="str"), pcmk_constants=dict(type="dict"), + saphanasr_provider=dict(type="str"), + filter=dict(type="str", required=False, default="os_family"), ) ) validator = HAClusterValidator( - os_type=module.params["ansible_os_family"], + os_type=OperatingSystemFamily( + str(ansible_facts(module).get("os_family", "UNKNOWN")).upper() + ), os_version=module.params["os_version"], instance_number=module.params["instance_number"], sid=module.params["sid"], virtual_machine_name=module.params["virtual_machine_name"], fencing_mechanism=module.params["fencing_mechanism"], constants=module.params["pcmk_constants"], + saphanasr_provider=HanaSRProvider(module.params["saphanasr_provider"]), ) module.exit_json(**validator.get_result()) diff --git a/src/modules/get_pcmk_properties_scs.py b/src/modules/get_pcmk_properties_scs.py index 6fbbf969..b0cd5063 100644 --- a/src/modules/get_pcmk_properties_scs.py +++ b/src/modules/get_pcmk_properties_scs.py @@ -12,20 +12,15 @@ """ from ansible.module_utils.basic import AnsibleModule +from ansible.module_utils.facts.compat import ansible_facts try: - from ansible.module_utils.sap_automation_qa import ( - SapAutomationQA, - TestStatus, - Parameters, - ) + from ansible.module_utils.sap_automation_qa import SapAutomationQA + from ansible.module_utils.enums import OperatingSystemFamily, Parameters, TestStatus from ansible.module_utils.commands import CIB_ADMIN except ImportError: - from src.module_utils.sap_automation_qa import ( - SapAutomationQA, - TestStatus, - Parameters, - ) + from src.module_utils.sap_automation_qa import SapAutomationQA + from src.module_utils.enums import OperatingSystemFamily, Parameters, TestStatus from src.module_utils.commands import CIB_ADMIN @@ -55,11 +50,6 @@ - SAP ERS instance number type: str required: true - ansible_os_family: - description: - - Operating system family (redhat, suse, etc.) 
- type: str - required: true virtual_machine_name: description: - Name of the virtual machine @@ -98,7 +88,6 @@ sid: "S4D" ascs_instance_number: "00" ers_instance_number: "10" - ansible_os_family: "{{ ansible_os_family|lower }}" virtual_machine_name: "{{ ansible_hostname }}" pcmk_constants: "{{ pcmk_validation_constants }}" fencing_mechanism: "sbd" @@ -194,18 +183,18 @@ class HAClusterValidator(SapAutomationQA): def __init__( self, - os_type, - sid, - scs_instance_number, - ers_instance_number, - virtual_machine_name, - constants, - fencing_mechanism, + os_type: OperatingSystemFamily, + sid: str, + scs_instance_number: str, + ers_instance_number: str, + virtual_machine_name: str, + constants: dict, + fencing_mechanism: str, nfs_provider=None, category=None, ): super().__init__() - self.os_type = os_type + self.os_type = os_type.value.upper() self.category = category self.sid = sid self.scs_instance_number = scs_instance_number @@ -586,11 +575,11 @@ def main() -> None: sid=dict(type="str"), ascs_instance_number=dict(type="str"), ers_instance_number=dict(type="str"), - ansible_os_family=dict(type="str"), virtual_machine_name=dict(type="str"), pcmk_constants=dict(type="dict"), fencing_mechanism=dict(type="str"), nfs_provider=dict(type="str", default=""), + filter=dict(type="str", required=False, default="os_family"), ) ) @@ -598,7 +587,9 @@ def main() -> None: sid=module.params["sid"], scs_instance_number=module.params["ascs_instance_number"], ers_instance_number=module.params["ers_instance_number"], - os_type=module.params["ansible_os_family"], + os_type=OperatingSystemFamily( + str(ansible_facts(module).get("os_family", "UNKNOWN")).upper() + ), virtual_machine_name=module.params["virtual_machine_name"], constants=module.params["pcmk_constants"], fencing_mechanism=module.params["fencing_mechanism"], diff --git a/src/modules/location_constraints.py b/src/modules/location_constraints.py index c5bb3ff6..71e510b2 100644 --- a/src/modules/location_constraints.py +++ b/src/modules/location_constraints.py @@ -8,13 +8,16 @@ import xml.etree.ElementTree as ET from typing import List from ansible.module_utils.basic import AnsibleModule +from ansible.module_utils.facts.compat import ansible_facts try: - from ansible.module_utils.sap_automation_qa import SapAutomationQA, TestStatus + from ansible.module_utils.sap_automation_qa import SapAutomationQA from ansible.module_utils.commands import RSC_CLEAR, CONSTRAINTS + from ansible.module_utils.enums import OperatingSystemFamily, TestStatus except ImportError: - from src.module_utils.sap_automation_qa import SapAutomationQA, TestStatus + from src.module_utils.sap_automation_qa import SapAutomationQA from src.module_utils.commands import RSC_CLEAR, CONSTRAINTS + from src.module_utils.enums import OperatingSystemFamily, TestStatus DOCUMENTATION = r""" @@ -33,12 +36,6 @@ type: str required: true choices: ['remove'] - ansible_os_family: - description: - - Operating system family (redhat, suse, etc.) - - Used to determine the appropriate command format for the OS - type: str - required: true author: - Microsoft Corporation notes: @@ -54,7 +51,6 @@ - name: Remove all location constraints location_constraints: action: "remove" - ansible_os_family: "{{ ansible_os_family|lower }}" register: constraints_result - name: Display constraint removal results @@ -96,7 +92,7 @@ class LocationConstraintsManager(SapAutomationQA): Class to manage the location constraints in a pacemaker cluster. 
""" - def __init__(self, ansible_os_family: str): + def __init__(self, ansible_os_family: OperatingSystemFamily): super().__init__() self.ansible_os_family = ansible_os_family self.result.update( @@ -139,7 +135,8 @@ def location_constraints_exists(self) -> List[ET.Element]: self.result["details"] = xml_output return ET.fromstring(xml_output).findall(".//rsc_location") if xml_output else [] except Exception as ex: - self.handle_exception(ex) + self.handle_error(ex) + return [] def run_module() -> None: @@ -149,14 +146,17 @@ def run_module() -> None: """ module_args = dict( action=dict(type="str", required=True), - ansible_os_family=dict(type="str", required=True), + filter=dict(type="str", required=False, default="os_family"), ) module = AnsibleModule(argument_spec=module_args, supports_check_mode=True) action = module.params["action"] - ansible_os_family = module.params["ansible_os_family"] - manager = LocationConstraintsManager(ansible_os_family) + manager = LocationConstraintsManager( + ansible_os_family=OperatingSystemFamily( + str(ansible_facts(module).get("os_family", "UNKNOWN")).upper() + ) + ) if module.check_mode: module.exit_json(**manager.get_result()) diff --git a/src/modules/log_parser.py b/src/modules/log_parser.py index 4c87cdf6..c55d61d5 100644 --- a/src/modules/log_parser.py +++ b/src/modules/log_parser.py @@ -8,11 +8,14 @@ import json from datetime import datetime from ansible.module_utils.basic import AnsibleModule +from ansible.module_utils.facts.compat import ansible_facts try: from ansible.module_utils.sap_automation_qa import SapAutomationQA, TestStatus + from ansible.module_utils.enums import OperatingSystemFamily except ImportError: - from src.module_utils.sap_automation_qa import SapAutomationQA, TestStatus + from src.module_utils.sap_automation_qa import SapAutomationQA + from src.module_utils.enums import OperatingSystemFamily, TestStatus DOCUMENTATION = r""" --- @@ -49,12 +52,6 @@ type: list required: false default: [] - ansible_os_family: - description: - - Operating system family (e.g., REDHAT, SUSE). - - Used to determine the appropriate log timestamp format. - type: str - required: true function: description: - Specifies the function to execute: "parse_logs" or "merge_logs". 
@@ -85,7 +82,6 @@ start_time: "{{ (ansible_date_time.iso8601 | to_datetime - '1 hour') | to_datetime('%Y-%m-%d %H:%M:%S') }}" end_time: "{{ ansible_date_time.iso8601 | to_datetime('%Y-%m-%d %H:%M:%S') }}" log_file: "/var/log/messages" - ansible_os_family: "{{ ansible_os_family|upper }}" register: parse_result - name: Display filtered log entries @@ -98,7 +94,6 @@ logs: - "[\"Jan 01 12:34:56 server1 pacemaker-controld: Notice: Resource SAPHana_HDB_00 started\"]" - "[\"Jan 01 12:35:00 server2 pacemaker-controld: Notice: Resource SAPHana_HDB_01 started\"]" - ansible_os_family: "REDHAT" register: merge_result - name: Display merged log entries @@ -195,8 +190,8 @@ def __init__( start_time: str, end_time: str, log_file: str, - ansible_os_family: str, - logs: list = None, + ansible_os_family: OperatingSystemFamily, + logs: list = list(), ): super().__init__() self.start_time = start_time @@ -244,13 +239,13 @@ def merge_logs(self) -> None: for log in parsed_logs: try: - if self.ansible_os_family == "REDHAT": + if self.ansible_os_family == OperatingSystemFamily.REDHAT: timestamp_str = " ".join(log.split()[:3]) log_time = datetime.strptime(timestamp_str, "%b %d %H:%M:%S") log_time = log_time.replace(year=datetime.now().year) all_logs.append((log_time, log)) - elif self.ansible_os_family == "SUSE": + elif self.ansible_os_family == OperatingSystemFamily.SUSE: timestamp_str = log.split(".")[0] log_time = datetime.strptime(timestamp_str, "%Y-%m-%dT%H:%M:%S") all_logs.append((log_time, log)) @@ -282,12 +277,12 @@ def parse_logs(self) -> None: with open(self.log_file, "r", encoding="utf-8") as file: for line in file: try: - if self.ansible_os_family == "REDHAT": + if self.ansible_os_family == OperatingSystemFamily.REDHAT: log_time = datetime.strptime( " ".join(line.split()[:3]), "%b %d %H:%M:%S" ) log_time = log_time.replace(year=start_dt.year) - elif self.ansible_os_family == "SUSE": + elif self.ansible_os_family == OperatingSystemFamily.SUSE: log_time = datetime.strptime(line.split(".")[0], "%Y-%m-%dT%H:%M:%S") else: continue @@ -323,18 +318,19 @@ def run_module() -> None: end_time=dict(type="str", required=False), log_file=dict(type="str", required=False, default="/var/log/messages"), keywords=dict(type="list", required=False, default=[]), - ansible_os_family=dict(type="str", required=True), function=dict(type="str", required=True, choices=["parse_logs", "merge_logs"]), logs=dict(type="list", required=False, default=[]), + filter=dict(type="str", required=False, default="os_family"), ) module = AnsibleModule(argument_spec=module_args, supports_check_mode=True) - parser = LogParser( start_time=module.params.get("start_time"), end_time=module.params.get("end_time"), log_file=module.params.get("log_file"), - ansible_os_family=module.params["ansible_os_family"], + ansible_os_family=OperatingSystemFamily( + str(ansible_facts(module).get("os_family", "UNKNOWN")).upper() + ), logs=module.params.get("logs"), ) if module.params["function"] == "parse_logs": diff --git a/src/modules/send_telemetry_data.py b/src/modules/send_telemetry_data.py index 668b407c..147c5764 100644 --- a/src/modules/send_telemetry_data.py +++ b/src/modules/send_telemetry_data.py @@ -20,17 +20,11 @@ from ansible.module_utils.basic import AnsibleModule try: - from ansible.module_utils.sap_automation_qa import ( - SapAutomationQA, - TestStatus, - TelemetryDataDestination, - ) + from ansible.module_utils.sap_automation_qa import SapAutomationQA + from ansible.module_utils.enums import TelemetryDataDestination, TestStatus except ImportError: - 
from src.module_utils.sap_automation_qa import ( - SapAutomationQA, - TestStatus, - TelemetryDataDestination, - ) + from src.module_utils.sap_automation_qa import SapAutomationQA + from src.module_utils.enums import TelemetryDataDestination, TestStatus DOCUMENTATION = r""" --- @@ -248,9 +242,9 @@ def send_telemetry_data_to_azuredataexplorer(self, telemetry_json_data: str) -> """ import pandas as pd - telemetry_json_data = json.loads(telemetry_json_data) + telemetry_json_dict = json.loads(telemetry_json_data) data_frame = pd.DataFrame( - [telemetry_json_data.values()], columns=telemetry_json_data.keys() + [telemetry_json_dict.values()], columns=telemetry_json_dict.keys() ) ingestion_properties = IngestionProperties( database=self.module_params["adx_database_name"], diff --git a/src/roles/ha_db_hana/tasks/block-network.yml b/src/roles/ha_db_hana/tasks/block-network.yml index e12c87b6..98b39c02 100644 --- a/src/roles/ha_db_hana/tasks/block-network.yml +++ b/src/roles/ha_db_hana/tasks/block-network.yml @@ -89,9 +89,10 @@ block: - name: "Test Execution: Validate HANA DB cluster status (Primary Node)" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" register: cluster_status_test_execution_primary retries: "{{ default_retries }}" delay: "{{ default_delay }}" @@ -114,9 +115,10 @@ - name: "Test Execution: Validate HANA DB cluster status 2" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" register: cluster_status_post_primary retries: "{{ default_retries }}" delay: "{{ default_delay }}" @@ -132,9 +134,10 @@ block: - name: "Test Execution: Validate HANA DB cluster status (Secondary Node)" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" register: cluster_status_test_execution_secondary retries: "{{ default_retries }}" delay: "{{ default_delay }}" @@ -148,9 +151,10 @@ - name: "Test Execution: Validate HANA DB cluster status 2" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" register: cluster_status_post_secondary retries: "{{ default_retries }}" delay: "{{ default_delay }}" diff --git a/src/roles/ha_db_hana/tasks/files/constants.yaml b/src/roles/ha_db_hana/tasks/files/constants.yaml index 90305953..9d8a8380 100644 --- a/src/roles/ha_db_hana/tasks/files/constants.yaml +++ b/src/roles/ha_db_hana/tasks/files/constants.yaml @@ -99,6 +99,22 @@ RESOURCE_DEFAULTS: interval: ["0", "0s"] timeout: ["300", "300s"] + angi_topology: + meta_attributes: + clone-node-max: "1" + target-role: "Started" + interleave: "true" + operations: + monitor: + interval: ["50", "50s"] + timeout: ["600", "600s"] + start: + interval: ["0", "0s"] + timeout: ["600", "600s"] + stop: + interval: ["0", "0s"] + timeout: ["300", "300s"] + hana: meta_attributes: notify: "true" 
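The nested defaults above are what the DB validator walks when comparing live CIB attributes against expected values. A rough sketch of the lookup this layout implies, using a trimmed stand-in for the constants file (this mirrors, but is not, the module's actual `_get_expected_value`):

```python
import yaml

# Trimmed stand-in for RESOURCE_DEFAULTS in constants.yaml.
CONSTANTS = yaml.safe_load("""
RESOURCE_DEFAULTS:
  SUSE:
    angi_hana:
      meta_attributes:
        notify: "true"
        clone-max: "2"
      operations:
        monitor:
          timeout: ["700", "700s"]
""")

def expected_value(os_family: str, category: str, section: str, name: str):
    """Return the expected value for one resource attribute, or None if undefined."""
    return (
        CONSTANTS["RESOURCE_DEFAULTS"]
        .get(os_family, {})
        .get(category, {})
        .get(section, {})
        .get(name)
    )

# A list value means any listed spelling is acceptable (e.g. "700" or "700s").
assert expected_value("SUSE", "angi_hana", "meta_attributes", "notify") == "true"
assert expected_value("SUSE", "angi_hana", "operations", "monitor") == {"timeout": ["700", "700s"]}
```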
@@ -124,6 +140,34 @@ RESOURCE_DEFAULTS: monitor: timeout: ["700", "700s"] + angi_hana: + meta_attributes: + notify: "true" + clone-max: "2" + clone-node-max: "1" + target-role: "Started" + interleave: "true" + priority: "100" + instance_attributes: + PREFER_SITE_TAKEOVER: "true" + DUPLICATE_PRIMARY_TIMEOUT: "7200" + AUTOMATED_REGISTER: "true" + operations: + start: + interval: ["0", "0s"] + timeout: ["3600", "3600s"] + stop: + interval: ["0", "0s"] + timeout: ["3600", "3600s"] + promote: + interval: ["0", "0s"] + timeout: ["3600", "3600s"] + demote: + interval: ["0", "0s"] + timeout: ["320", "320s"] + monitor: + timeout: ["700", "700s"] + ipaddr: meta_attributes: target-role: "Started" @@ -147,9 +191,28 @@ RESOURCE_DEFAULTS: interval: ["0", "0s"] timeout: ["120", "120s"] + angi_filesystem: + meta_attributes: + clone-node-max: "1" + interleave: "true" + operations: + monitor: + interval: ["120", "120s"] + timeout: ["120", "120s"] + start: + interval: ["0", "0s"] + timeout: ["10", "10s"] + stop: + interval: ["0", "0s"] + timeout: ["20", "20s"] + azurelb: meta_attributes: resource-stickiness: "0" + operations: + monitor: + interval: ["10", "10s"] + timeout: ["20", "20s"] REDHAT: fence_agent: @@ -285,14 +348,19 @@ OS_PARAMETERS: # Reading the global.ini file to get the provider and path for the SAPHanaSR resource agent GLOBAL_INI: SUSE: - provider: "SAPHanaSR" - path: ["/usr/share/SAPHanaSR", "/hana/shared/myHooks"] - execution_order: "1" - + SAPHanaSR: + provider: "SAPHanaSR" + path: ["/usr/share/SAPHanaSR", "/hana/shared/myHooks"] + execution_order: "1" + SAPHanaSR-angi: + provider: "susHanaSR" + path: ["/usr/share/SAPHanaSR", "/hana/shared/myHooks"] + execution_order: "1" REDHAT: - provider: "SAPHanaSR" - path: ["/usr/share/SAPHanaSR/srHook", "/hana/shared/myHooks"] - execution_order: "1" + SAPHanaSR: + provider: "SAPHanaSR" + path: ["/usr/share/SAPHanaSR/srHook", "/hana/shared/myHooks"] + execution_order: "1" # === Azure Load Balancer === diff --git a/src/roles/ha_db_hana/tasks/fs-freeze.yml b/src/roles/ha_db_hana/tasks/fs-freeze.yml index 038a1075..5efa5cd0 100644 --- a/src/roles/ha_db_hana/tasks/fs-freeze.yml +++ b/src/roles/ha_db_hana/tasks/fs-freeze.yml @@ -55,9 +55,10 @@ - name: "Test Execution: Validate HANA DB cluster status 1" when: cluster_status_pre.AUTOMATED_REGISTER | lower == "true" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" register: cluster_status_test_execution retries: "{{ default_retries }}" delay: "{{ default_delay }}" @@ -70,9 +71,10 @@ block: - name: "Test Execution: Validate HANA DB cluster status 2" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "post_failover" database_sid: "{{ db_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" register: cluster_status_post retries: "{{ default_retries }}" delay: "{{ default_delay }}" diff --git a/src/roles/ha_db_hana/tasks/ha-config.yml b/src/roles/ha_db_hana/tasks/ha-config.yml index 9241d4d5..de3aac75 100644 --- a/src/roles/ha_db_hana/tasks/ha-config.yml +++ b/src/roles/ha_db_hana/tasks/ha-config.yml @@ -21,6 +21,10 @@ become: true become_user: root block: + - name: "Get the SAPHanaSR provider" + when: (ansible_os_family | upper) == "SUSE" + ansible.builtin.include_tasks: 
"roles/misc/tasks/get-saphanasr-provider.yml" + - name: "Retrieve Virtual Machine name" ansible.builtin.uri: url: http://169.254.169.254/metadata/instance?api-version=2021-02-01 @@ -33,11 +37,11 @@ get_pcmk_properties_db: sid: "{{ db_sid | upper }}" instance_number: "{{ db_instance_number }}" - ansible_os_family: "{{ ansible_os_family | upper }}" virtual_machine_name: "{{ azure_instance_metadata.json.compute.name }}" fencing_mechanism: "{{ database_cluster_type }}" os_version: "{{ ansible_distribution_version }}" pcmk_constants: "{{ lookup('file', 'constants.yaml') | from_yaml }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" register: test_result - name: "Set the test case status to PASSED" diff --git a/src/roles/ha_db_hana/tasks/primary-crash-index.yml b/src/roles/ha_db_hana/tasks/primary-crash-index.yml index e94c5123..a3543ae5 100644 --- a/src/roles/ha_db_hana/tasks/primary-crash-index.yml +++ b/src/roles/ha_db_hana/tasks/primary-crash-index.yml @@ -19,7 +19,6 @@ become: true check_indexserver: database_sid: "{{ db_sid | upper }}" - ansible_os_family: "{{ ansible_os_family | lower }}" register: index_server_check # /*--------------------------------------------------------------------------- @@ -54,9 +53,10 @@ - name: "Test Execution: Validate HANA DB cluster status 1" when: cluster_status_pre.AUTOMATED_REGISTER == "true" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" register: cluster_status_test_execution retries: "{{ default_retries }}" delay: "{{ default_delay }}" @@ -67,9 +67,10 @@ - name: "Test Execution: Validate HANA DB cluster status" when: cluster_status_pre.AUTOMATED_REGISTER == "false" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" register: cluster_status_test_execution retries: "{{ default_retries }}" delay: "{{ default_delay }}" @@ -115,9 +116,10 @@ - name: "Test Execution: Validate HANA DB cluster status 2" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "post_failover" database_sid: "{{ db_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" register: cluster_status_post retries: "{{ default_retries }}" delay: "{{ default_delay }}" diff --git a/src/roles/ha_db_hana/tasks/primary-echo-b.yml b/src/roles/ha_db_hana/tasks/primary-echo-b.yml index 76588e11..173acecb 100644 --- a/src/roles/ha_db_hana/tasks/primary-echo-b.yml +++ b/src/roles/ha_db_hana/tasks/primary-echo-b.yml @@ -45,9 +45,10 @@ - name: "Test Execution: Validate HANA DB cluster status 1" when: cluster_status_pre.AUTOMATED_REGISTER == "true" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" retries: "{{ default_retries }}" delay: "{{ default_delay }}" register: cluster_status_test_execution @@ -58,9 +59,10 @@ - name: "Test Execution: Validate HANA DB cluster status" when: 
cluster_status_pre.AUTOMATED_REGISTER == "false" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" retries: "{{ default_retries }}" delay: "{{ default_delay }}" register: cluster_status_test_execution @@ -102,9 +104,10 @@ - name: "Test Execution: Validate HANA DB cluster status 2" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "post_failover" database_sid: "{{ db_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" register: cluster_status_post retries: "{{ default_retries }}" delay: "{{ default_delay }}" diff --git a/src/roles/ha_db_hana/tasks/primary-node-crash.yml b/src/roles/ha_db_hana/tasks/primary-node-crash.yml index 616a09bb..00089109 100644 --- a/src/roles/ha_db_hana/tasks/primary-node-crash.yml +++ b/src/roles/ha_db_hana/tasks/primary-node-crash.yml @@ -41,9 +41,10 @@ - name: "Test Execution: Validate HANA DB cluster status 1" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" register: cluster_status_test_execution retries: "{{ default_retries }}" delay: "{{ default_delay }}" @@ -83,9 +84,10 @@ - name: "Test execution: Validate HANA DB cluster status 2" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "post_failover" database_sid: "{{ db_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" register: cluster_status_post retries: "{{ default_retries }}" delay: "{{ default_delay }}" diff --git a/src/roles/ha_db_hana/tasks/primary-node-kill.yml b/src/roles/ha_db_hana/tasks/primary-node-kill.yml index d727fa88..5eca4111 100644 --- a/src/roles/ha_db_hana/tasks/primary-node-kill.yml +++ b/src/roles/ha_db_hana/tasks/primary-node-kill.yml @@ -42,9 +42,10 @@ - name: "Test Execution: Validate HANA DB cluster status 1" when: cluster_status_pre.AUTOMATED_REGISTER == "true" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" register: cluster_status_test_execution retries: "{{ default_retries }}" delay: "{{ default_delay }}" @@ -57,9 +58,10 @@ block: - name: "Test Execution: Validate HANA DB cluster status" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" register: cluster_status_test_execution retries: "{{ default_retries }}" delay: "{{ default_delay }}" @@ -100,9 +102,10 @@ - name: "Test execution: Validate HANA DB cluster status 2" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "post_failover" database_sid: "{{ db_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" + saphanasr_provider: "{{ saphanasr_provider | 
default('SAPHanaSR') }}" register: cluster_status_post retries: "{{ default_retries }}" delay: "{{ default_delay }}" diff --git a/src/roles/ha_db_hana/tasks/resource-migration.yml b/src/roles/ha_db_hana/tasks/resource-migration.yml index d35c3f9e..0c609767 100644 --- a/src/roles/ha_db_hana/tasks/resource-migration.yml +++ b/src/roles/ha_db_hana/tasks/resource-migration.yml @@ -31,7 +31,28 @@ test_execution_start: "{{ now(utc=true, fmt='%Y-%m-%d %H:%M:%S') }}" test_execution_hostname: "{{ hostvars[cluster_status_pre.primary_node].ansible_hostname }}" + - name: "Test Execution: Get HANA resource id for saphanasr_angi" + block: + - name: "Test Execution: Get HANA resource id for saphanasr_angi" + when: saphanasr_provider | default('SAPHanaSR') == "SAPHanaSR-angi" + ansible.builtin.shell: >- + set -o pipefail && {{ commands + | selectattr('name','equalto','get_hana_resource_id_saphanasr_angi') + | map(attribute=(ansible_os_family|upper)) + | first + }} + args: + executable: /bin/bash + changed_when: false + register: hana_resource_id + failed_when: hana_resource_id.rc != 0 + + - name: "Test Execution: Set fact the hana_resource_name" + ansible.builtin.set_fact: + hana_resource_name: "{{ hana_resource_id.stdout }}" + - name: "Test Execution: Get HANA resource id" + when: saphanasr_provider | default('SAPHanaSR') == "SAPHanaSR" block: - name: "Try master resource ID" ansible.builtin.shell: >- @@ -79,9 +100,10 @@ - name: "Test Execution: Validate HANA DB cluster status 1" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" register: cluster_status_test_execution retries: "{{ default_retries }}" delay: "{{ default_delay }}" @@ -123,7 +145,6 @@ - name: "Test Execution: Remove any location_constraints" location_constraints: action: "remove" - ansible_os_family: "{{ ansible_os_family | upper}}" register: location_constraints_result # This is required because the cluster reports incorrect location constraints @@ -134,9 +155,10 @@ - name: "Test Execution: Validate HANA DB cluster status 2" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" register: cluster_status_test_execution_1 retries: "{{ default_retries }}" delay: "{{ default_delay }}" diff --git a/src/roles/ha_db_hana/tasks/sbd-fencing.yml b/src/roles/ha_db_hana/tasks/sbd-fencing.yml index 1ea81653..aa794ec4 100644 --- a/src/roles/ha_db_hana/tasks/sbd-fencing.yml +++ b/src/roles/ha_db_hana/tasks/sbd-fencing.yml @@ -56,9 +56,10 @@ - name: "Test Execution: Validate HANA DB cluster status during stop operation." 
when: cluster_status_pre.AUTOMATED_REGISTER == "true" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" retries: "{{ default_retries }}" delay: "{{ default_delay }}" register: cluster_status_test_execution @@ -71,9 +72,10 @@ block: - name: "Test Execution: Validate HANA DB cluster status 2" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" register: cluster_status_test_post retries: "{{ default_retries }}" delay: "{{ default_delay }}" diff --git a/src/roles/ha_db_hana/tasks/secondary-crash-index.yml b/src/roles/ha_db_hana/tasks/secondary-crash-index.yml index c7d0a0bf..986c7d96 100644 --- a/src/roles/ha_db_hana/tasks/secondary-crash-index.yml +++ b/src/roles/ha_db_hana/tasks/secondary-crash-index.yml @@ -18,7 +18,6 @@ become: true check_indexserver: database_sid: "{{ db_sid | upper }}" - ansible_os_family: "{{ ansible_os_family | lower }}" register: index_server_check # /*--------------------------------------------------------------------------- @@ -54,9 +53,10 @@ block: - name: "Test Execution: Validate HANA DB cluster status" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "test_execution" - ansible_os_family: "{{ ansible_os_family | upper }}" database_sid: "{{ db_sid | lower }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" register: cluster_status_test_execution retries: "{{ default_retries }}" delay: "{{ default_delay }}" @@ -66,9 +66,10 @@ - name: "Test Execution: Validate HANA DB cluster status 2" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "post_failover" database_sid: "{{ db_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" register: cluster_status_post retries: "{{ default_retries }}" delay: "{{ default_delay }}" diff --git a/src/roles/ha_db_hana/tasks/secondary-echo-b.yml b/src/roles/ha_db_hana/tasks/secondary-echo-b.yml index b6dbb560..1836717d 100644 --- a/src/roles/ha_db_hana/tasks/secondary-echo-b.yml +++ b/src/roles/ha_db_hana/tasks/secondary-echo-b.yml @@ -49,9 +49,10 @@ block: - name: "Test Execution: Validate HANA DB cluster status" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" retries: "{{ default_retries }}" delay: "{{ default_delay }}" register: cluster_status_test_execution @@ -61,9 +62,10 @@ - name: "Test Execution: Validate HANA DB cluster status 2" get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "post_failover" database_sid: "{{ db_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" register: cluster_status_post retries: "{{ default_retries }}" delay: "{{ default_delay }}" diff --git a/src/roles/ha_db_hana/tasks/secondary-node-kill.yml b/src/roles/ha_db_hana/tasks/secondary-node-kill.yml 
index 86e24ad5..19d65184 100644
--- a/src/roles/ha_db_hana/tasks/secondary-node-kill.yml
+++ b/src/roles/ha_db_hana/tasks/secondary-node-kill.yml
@@ -47,9 +47,10 @@
     block:
       - name: "Test Execution: Validate HANA DB cluster status"
         get_cluster_status_db:
+          db_instance_number: "{{ db_instance_number }}"
           operation_step: "test_execution"
-          ansible_os_family: "{{ ansible_os_family | upper }}"
           database_sid: "{{ db_sid | lower }}"
+          saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}"
         register: cluster_status_test_execution
         retries: "{{ default_retries }}"
         delay: "{{ default_delay }}"
@@ -59,9 +60,10 @@
       - name: "Test execution: Validate HANA DB cluster status 2"
         get_cluster_status_db:
+          db_instance_number: "{{ db_instance_number }}"
           operation_step: "post_failover"
-          ansible_os_family: "{{ ansible_os_family | upper }}"
           database_sid: "{{ db_sid | lower }}"
+          saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}"
         register: cluster_status_post
         retries: "{{ default_retries }}"
         delay: "{{ default_delay }}"
diff --git a/src/roles/ha_scs/tasks/ascs-migration.yml b/src/roles/ha_scs/tasks/ascs-migration.yml
index 6844c7b8..c3891986 100644
--- a/src/roles/ha_scs/tasks/ascs-migration.yml
+++ b/src/roles/ha_scs/tasks/ascs-migration.yml
@@ -40,7 +40,6 @@
     - name: "Test Execution: Validate SCS cluster status"
       get_cluster_status_scs:
         sap_sid: "{{ sap_sid | lower }}"
-        ansible_os_family: "{{ ansible_os_family | upper }}"
       register: cluster_status_test_execution
       retries: "{{ default_retries }}"
       delay: "{{ default_delay }}"
diff --git a/src/roles/ha_scs/tasks/ascs-node-crash.yml b/src/roles/ha_scs/tasks/ascs-node-crash.yml
index 46325d7d..ed74d558 100644
--- a/src/roles/ha_scs/tasks/ascs-node-crash.yml
+++ b/src/roles/ha_scs/tasks/ascs-node-crash.yml
@@ -49,7 +49,6 @@
     - name: "Test Execution: Validate ASCS node has stopped"
       get_cluster_status_scs:
         sap_sid: "{{ sap_sid | lower }}"
-        ansible_os_family: "{{ ansible_os_family | upper }}"
       register: cluster_status_test_execution_pre
       retries: "{{ default_retries }}"
       delay: "{{ default_delay }}"
@@ -59,7 +58,6 @@
       when: hostvars[cluster_status_pre.ascs_node].ensa2_check.stdout == ""
       get_cluster_status_scs:
         sap_sid: "{{ sap_sid | lower }}"
-        ansible_os_family: "{{ ansible_os_family | upper }}"
       register: cluster_status_test_execution
       retries: "{{ default_retries }}"
       delay: "{{ default_delay }}"
@@ -71,7 +69,6 @@
       when: hostvars[cluster_status_pre.ascs_node].ensa2_check.stdout != ""
       get_cluster_status_scs:
         sap_sid: "{{ sap_sid | lower }}"
-        ansible_os_family: "{{ ansible_os_family | upper }}"
       register: cluster_status_test_execution
       retries: "{{ default_retries }}"
       delay: "{{ default_delay }}"
diff --git a/src/roles/ha_scs/tasks/block-network.yml b/src/roles/ha_scs/tasks/block-network.yml
index a561a16a..293865ea 100644
--- a/src/roles/ha_scs/tasks/block-network.yml
+++ b/src/roles/ha_scs/tasks/block-network.yml
@@ -87,7 +87,6 @@
     - name: "Test Execution: Validate SCS cluster status"
       get_cluster_status_scs:
         sap_sid: "{{ sap_sid | lower }}"
-        ansible_os_family: "{{ ansible_os_family | upper }}"
       register: cluster_status_test_execution
       retries: "{{ default_retries }}"
       delay: "{{ default_delay }}"
@@ -118,7 +117,6 @@
     - name: "Test Execution: Validate SCS cluster status"
       get_cluster_status_scs:
         sap_sid: "{{ sap_sid | lower }}"
-        ansible_os_family: "{{ ansible_os_family | upper }}"
       register: cluster_status_test_execution
       retries: "{{ default_retries }}"
       delay: "{{ default_delay }}"
diff --git
a/src/roles/ha_scs/tasks/ha-config.yml b/src/roles/ha_scs/tasks/ha-config.yml index 6a846045..3136f21b 100644 --- a/src/roles/ha_scs/tasks/ha-config.yml +++ b/src/roles/ha_scs/tasks/ha-config.yml @@ -24,7 +24,6 @@ sid: "{{ sap_sid | upper }}" ascs_instance_number: "{{ scs_instance_number }}" ers_instance_number: "{{ ers_instance_number }}" - ansible_os_family: "{{ ansible_os_family | upper }}" virtual_machine_name: "{{ azure_instance_metadata.json.compute.name }}" pcmk_constants: "{{ lookup('file', 'constants.yaml') | from_yaml }}" fencing_mechanism: "{{ scs_cluster_type }}" diff --git a/src/roles/ha_scs/tasks/ha-failover-to-node.yml b/src/roles/ha_scs/tasks/ha-failover-to-node.yml index a14c47c3..0f86123d 100644 --- a/src/roles/ha_scs/tasks/ha-failover-to-node.yml +++ b/src/roles/ha_scs/tasks/ha-failover-to-node.yml @@ -46,7 +46,6 @@ - name: "Test Execution: Validate SCS cluster status" get_cluster_status_scs: sap_sid: "{{ sap_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" register: cluster_status_test_execution retries: "{{ default_retries }}" delay: "{{ default_delay }}" diff --git a/src/roles/ha_scs/tasks/kill-enqueue-replication.yml b/src/roles/ha_scs/tasks/kill-enqueue-replication.yml index b181f4cb..d99f1009 100644 --- a/src/roles/ha_scs/tasks/kill-enqueue-replication.yml +++ b/src/roles/ha_scs/tasks/kill-enqueue-replication.yml @@ -51,7 +51,6 @@ - name: "Test Execution: Validate ERS node is not running" get_cluster_status_scs: sap_sid: "{{ sap_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" register: cluster_status_test_execution_pre retries: "{{ default_retries }}" delay: "{{ default_delay }}" @@ -65,7 +64,6 @@ - name: "Test Execution: Validate SCS cluster status" get_cluster_status_scs: sap_sid: "{{ sap_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" register: cluster_status_test_execution retries: "{{ default_retries }}" delay: "{{ default_delay }}" diff --git a/src/roles/ha_scs/tasks/kill-enqueue-server.yml b/src/roles/ha_scs/tasks/kill-enqueue-server.yml index 8c0d811d..4fb810dd 100644 --- a/src/roles/ha_scs/tasks/kill-enqueue-server.yml +++ b/src/roles/ha_scs/tasks/kill-enqueue-server.yml @@ -51,7 +51,6 @@ - name: "Test Execution: Validate ASCS node has stopped" get_cluster_status_scs: sap_sid: "{{ sap_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" register: cluster_status_test_execution_pre retries: "{{ default_retries }}" delay: "{{ default_delay }}" @@ -66,7 +65,6 @@ when: ensa2_check.stdout == "0" get_cluster_status_scs: sap_sid: "{{ sap_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" register: cluster_status_test_execution retries: "{{ default_retries }}" delay: "{{ default_delay }}" @@ -78,7 +76,6 @@ when: ensa2_check.stdout != "0" get_cluster_status_scs: sap_sid: "{{ sap_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" register: cluster_status_test_execution retries: "{{ default_retries }}" delay: "{{ default_delay }}" diff --git a/src/roles/ha_scs/tasks/kill-message-server.yml b/src/roles/ha_scs/tasks/kill-message-server.yml index dcda2d70..36a4a8ca 100644 --- a/src/roles/ha_scs/tasks/kill-message-server.yml +++ b/src/roles/ha_scs/tasks/kill-message-server.yml @@ -47,7 +47,6 @@ - name: "Test Execution: Validate ASCS node has stopped" get_cluster_status_scs: sap_sid: "{{ sap_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" register: cluster_status_test_execution_pre retries: "{{ default_retries }}" delay: "{{ 
default_delay }}" @@ -57,7 +56,6 @@ when: ensa2_check.stdout == "" get_cluster_status_scs: sap_sid: "{{ sap_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" register: cluster_status_test_execution retries: "{{ default_retries }}" delay: "{{ default_delay }}" @@ -69,7 +67,6 @@ when: ensa2_check.stdout != "1" get_cluster_status_scs: sap_sid: "{{ sap_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" register: cluster_status_test_execution retries: "{{ default_retries }}" delay: "{{ default_delay }}" diff --git a/src/roles/ha_scs/tasks/kill-sapstartsrv-process.yml b/src/roles/ha_scs/tasks/kill-sapstartsrv-process.yml index f1a63db8..d9703a36 100644 --- a/src/roles/ha_scs/tasks/kill-sapstartsrv-process.yml +++ b/src/roles/ha_scs/tasks/kill-sapstartsrv-process.yml @@ -64,7 +64,6 @@ - name: "Test Execution: Validate SCS cluster status" get_cluster_status_scs: sap_sid: "{{ sap_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" register: cluster_status_test_execution retries: "{{ default_retries }}" delay: "{{ default_delay }}" diff --git a/src/roles/ha_scs/tasks/manual-restart.yml b/src/roles/ha_scs/tasks/manual-restart.yml index 580d3f77..66f5b6d2 100644 --- a/src/roles/ha_scs/tasks/manual-restart.yml +++ b/src/roles/ha_scs/tasks/manual-restart.yml @@ -46,7 +46,6 @@ - name: "Test Execution: Validate SCS cluster status 1" get_cluster_status_scs: sap_sid: "{{ sap_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" register: cluster_status_test_execution retries: "{{ default_retries }}" delay: "{{ default_delay }}" @@ -74,7 +73,6 @@ - name: "Test Execution: Validate SCS cluster status 2" get_cluster_status_scs: sap_sid: "{{ sap_sid | lower }}" - ansible_os_family: "{{ ansible_os_family | upper }}" register: cluster_status_test_execution_2 retries: "{{ default_retries }}" delay: "{{ default_delay }}" diff --git a/src/roles/misc/tasks/cluster-report.yml b/src/roles/misc/tasks/cluster-report.yml index a7d844bd..f37a8440 100644 --- a/src/roles/misc/tasks/cluster-report.yml +++ b/src/roles/misc/tasks/cluster-report.yml @@ -8,14 +8,16 @@ - name: "Get the cluster status" become: true get_cluster_status_db: + db_instance_number: "{{ db_instance_number }}" operation_step: "cluster_report_collection" database_sid: "{{ db_sid | lower | default('') }}" - ansible_os_family: "{{ ansible_os_family | upper }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" register: cluster_status failed_when: cluster_status.primary_node == "" - name: "Get the cluster report from the primary node" become: true + run_once: true when: - cluster_status is defined - cluster_status.primary_node == ansible_hostname diff --git a/src/roles/misc/tasks/get-saphanasr-provider.yml b/src/roles/misc/tasks/get-saphanasr-provider.yml new file mode 100644 index 00000000..c8a4f3cd --- /dev/null +++ b/src/roles/misc/tasks/get-saphanasr-provider.yml @@ -0,0 +1,29 @@ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+
+# /*---------------------------------------------------------------------------
+# |                    Get SAPHanaSR Provider (on SUSE only)                  |
+# +--------------------------------------------------------------------------*/
+
+- name: Get SAPHanaSR provider for SUSE
+  when: (ansible_os_family | upper) == "SUSE"
+  block:
+    - name: "Get SAPHanaSR Provider using command"
+      become: true
+      become_user: "{{ db_sid | lower }}adm"
+      ansible.builtin.command: "SAPHanaSR-manageProvider --sid {{ db_sid | upper }} --show --provider=sushanasr"
+      register: hanasr_command_output
+      changed_when: false
+      failed_when: false
+      args:
+        chdir: "/usr/sbin"
+
+    - name: "Set SAPHanaSR-angi Provider"
+      when: hanasr_command_output.stdout != ""
+      ansible.builtin.set_fact:
+        saphanasr_provider: "SAPHanaSR-angi"
+
+    - name: "Set SAPHanaSR Provider"
+      when: hanasr_command_output.stdout == ""
+      ansible.builtin.set_fact:
+        saphanasr_provider: "SAPHanaSR"
diff --git a/src/roles/misc/tasks/post-validations.yml b/src/roles/misc/tasks/post-validations.yml
index f3ea15d9..c8a1dcec 100644
--- a/src/roles/misc/tasks/post-validations.yml
+++ b/src/roles/misc/tasks/post-validations.yml
@@ -34,7 +34,6 @@
     - name: "Merge and sort logs from all nodes by timestamp"
       log_parser:
         function: "merge_logs"
-        ansible_os_family: "{{ ansible_os_family | upper }}"
        logs:
          - "{{ hostvars[primary_node]['var_log_messages_output'].filtered_logs | default('[]') }}"
          - "{{ hostvars[secondary_node]['var_log_messages_output'].filtered_logs | default('[]') }}"
diff --git a/src/roles/misc/tasks/pre-validations-db.yml b/src/roles/misc/tasks/pre-validations-db.yml
index 2f1d1033..614a58ef 100644
--- a/src/roles/misc/tasks/pre-validations-db.yml
+++ b/src/roles/misc/tasks/pre-validations-db.yml
@@ -12,16 +12,24 @@
       become: true
       location_constraints:
         action: "remove"
-        ansible_os_family: "{{ ansible_os_family | upper}}"
       register: location_constraints_results

+    - name: "Pre validation: Get SAPHanaSR provider"
+      when: (ansible_os_family | upper) == "SUSE"
+      ansible.builtin.include_tasks: "roles/misc/tasks/get-saphanasr-provider.yml"
+
    - name: "Pre Validation: Validate HANA DB cluster status on primary node"
      become: true
      get_cluster_status_db:
+        db_instance_number: "{{ db_instance_number }}"
        operation_step: "pre_failover"
-        ansible_os_family: "{{ ansible_os_family | upper }}"
        database_sid: "{{ db_sid | lower }}"
+        saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}"
      register: cluster_status_pre
+      until: cluster_status_pre.primary_node != "" or
+        cluster_status_pre.secondary_node != ""
+      delay: 5
+      retries: 3

    - name: "Pre Validation: CleanUp any failed resource"
      become: true
diff --git a/src/roles/misc/tasks/pre-validations-scs.yml b/src/roles/misc/tasks/pre-validations-scs.yml
index 660a8044..0df292c5 100644
--- a/src/roles/misc/tasks/pre-validations-scs.yml
+++ b/src/roles/misc/tasks/pre-validations-scs.yml
@@ -11,7 +11,6 @@
    - name: "Pre Validation: Validate SCS cluster status on ASCS node"
      get_cluster_status_scs:
        sap_sid: "{{ sap_sid | lower }}"
-        ansible_os_family: "{{ ansible_os_family | upper }}"
      become: true
      register: cluster_status_pre
diff --git a/src/roles/misc/tasks/rescue.yml b/src/roles/misc/tasks/rescue.yml
index 01e4f788..99cf9115 100644
--- a/src/roles/misc/tasks/rescue.yml
+++ b/src/roles/misc/tasks/rescue.yml
@@ -40,7 +40,6 @@
      delegate_to: localhost
      log_parser:
        function: "merge_logs"
-        ansible_os_family: "{{ ansible_os_family | upper }}"
        logs:
          - "{{ hostvars[first_node]['var_log_messages_output'].filtered_logs | default('[]') }}"
          - "{{
hostvars[second_node]['var_log_messages_output'].filtered_logs | default('[]') }}" diff --git a/src/roles/misc/tasks/var-log-messages.yml b/src/roles/misc/tasks/var-log-messages.yml index bbda82b2..aa8ecb0f 100644 --- a/src/roles/misc/tasks/var-log-messages.yml +++ b/src/roles/misc/tasks/var-log-messages.yml @@ -12,6 +12,5 @@ log_parser: start_time: "{{ test_execution_start | default(test_case_start_time_epoch) }}" end_time: "{{ now(utc=true, fmt='%Y-%m-%d %H:%M:%S') }}" - ansible_os_family: "{{ ansible_os_family | upper }}" function: "parse_logs" register: var_log_messages_output diff --git a/src/vars/input-api.yaml b/src/vars/input-api.yaml index ce6e527c..5867ebf9 100644 --- a/src/vars/input-api.yaml +++ b/src/vars/input-api.yaml @@ -253,6 +253,9 @@ commands: SUSE: "cibadmin --query --xpath \"//primitive[@type='SAPHana']\" --node-path | grep -oP \"master\\[@id='\\K[^']+\"" REDHAT: "cibadmin --query --xpath \"//primitive[@type='SAPHana']\" --node-path | grep -oP \"clone\\[@id='\\K[^']+\"" + - name: get_hana_resource_id_saphanasr_angi + SUSE: "cibadmin --query --xpath \"//primitive[@type='SAPHanaController']\" --node-path | grep -oP \"primitive\\[@id='\\K[^']+\"" + - name: resource_migration_cmd SUSE: "crm resource move {{ hana_resource_name | default('msl_SAPHana_' ~ (db_sid | upper) ~ '_HDB' ~ db_instance_number) }} {{ cluster_status_pre.secondary_node | default('') }} force" REDHAT: "pcs resource move {{ hana_resource_name | default('SAPHana_' ~ (db_sid | upper) ~ '_' ~ db_instance_number ~ '-clone') }} --master" diff --git a/tests/module_utils/filter_tests_test.py b/tests/module_utils/filter_tests_test.py new file mode 100644 index 00000000..6556cd56 --- /dev/null +++ b/tests/module_utils/filter_tests_test.py @@ -0,0 +1,424 @@ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. + +""" +Unit tests for the filter_tests module. +""" + +import json +import tempfile +import os +import pytest +import yaml +from src.module_utils.filter_tests import TestFilter + + +class TestTestFilter: + """ + Test class for the TestFilter class. + """ + + @pytest.fixture + def sample_config(self): + """ + Fixture providing sample test configuration data. + + :return: Sample configuration dictionary + :rtype: dict + """ + return { + "sap_functional_test_type_map": [ + {"name": "DatabaseHighAvailability", "value": "HA_DB"}, + {"name": "CentralServicesHighAvailability", "value": "HA_SCS"}, + ], + "test_groups": [ + { + "name": "HA_DB_HANA", + "test_cases": [ + { + "name": "HA Parameters Validation", + "task_name": "ha-config", + "description": "Validates HA configuration", + "enabled": True, + }, + { + "name": "Azure Load Balancer Validation", + "task_name": "azure-lb", + "description": "Validates Azure LB setup", + "enabled": True, + }, + { + "name": "Primary Node Crash", + "task_name": "primary-node-crash", + "description": "Simulates primary node crash", + "enabled": True, + }, + ], + }, + { + "name": "HA_SCS", + "test_cases": [ + { + "name": "SAPControl Config Validation", + "task_name": "sapcontrol-config", + "description": "Validates SAPControl config", + "enabled": True, + }, + { + "name": "ASCS Node Crash", + "task_name": "ascs-node-crash", + "description": "Simulates ASCS node crash", + "enabled": True, + }, + ], + }, + ], + "sap_sid": "HDB", + "db_sid": "HDB", + "default_retries": 50, + } + + @pytest.fixture + def temp_yaml_file(self, sample_config): + """ + Fixture providing a temporary YAML file with sample configuration. 
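+        The file is created with delete=False; each test removes it afterwards via os.unlink.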
+ + :param sample_config: Sample configuration data + :type sample_config: dict + :return: Path to temporary YAML file + :rtype: str + """ + with tempfile.NamedTemporaryFile(mode="w", suffix=".yaml", delete=False) as f: + yaml.dump(sample_config, f) + return f.name + + def test_init_with_valid_file(self, temp_yaml_file, sample_config): + """ + Test initialization with a valid YAML file. + + :param temp_yaml_file: Path to temporary YAML file + :type temp_yaml_file: str + :param sample_config: Expected configuration data + :type sample_config: dict + """ + try: + filter_obj = TestFilter(temp_yaml_file) + assert filter_obj.input_file == temp_yaml_file + assert filter_obj.config == sample_config + finally: + os.unlink(temp_yaml_file) + + def test_init_with_nonexistent_file(self, capsys): + """ + Test initialization with a non-existent file. + + :param capsys: Pytest fixture to capture stdout/stderr + :type capsys: pytest.CaptureFixture + """ + with pytest.raises(SystemExit) as exc_info: + TestFilter("nonexistent_file.yaml") + assert exc_info.value.code == 1 + captured = capsys.readouterr() + assert "Error: Configuration file nonexistent_file.yaml not found" in captured.err + + def test_init_with_invalid_yaml(self, capsys): + """ + Test initialization with an invalid YAML file. + + :param capsys: Pytest fixture to capture stdout/stderr + :type capsys: pytest.CaptureFixture + """ + with tempfile.NamedTemporaryFile(mode="w", suffix=".yaml", delete=False) as f: + f.write("invalid: yaml: content: [unclosed") + temp_file = f.name + try: + with pytest.raises(SystemExit) as exc_info: + TestFilter(temp_file) + assert exc_info.value.code == 1 + captured = capsys.readouterr() + assert f"Error parsing YAML file {temp_file}" in captured.err + finally: + os.unlink(temp_file) + + def test_filter_tests_no_filters(self, temp_yaml_file, sample_config): + """ + Test filter_tests with no filters applied. + + :param temp_yaml_file: Path to temporary YAML file + :type temp_yaml_file: str + :param sample_config: Expected configuration data + :type sample_config: dict + """ + try: + filter_obj = TestFilter(temp_yaml_file) + result = filter_obj.filter_tests() + result_dict = json.loads(result) + assert result_dict == sample_config + finally: + os.unlink(temp_yaml_file) + + def test_filter_tests_by_group(self, temp_yaml_file): + """ + Test filter_tests with a specific test group. + + :param temp_yaml_file: Path to temporary YAML file + :type temp_yaml_file: str + """ + try: + filter_obj = TestFilter(temp_yaml_file) + result = filter_obj.filter_tests(test_group="HA_DB_HANA") + result_dict = json.loads(result) + ha_db_group = next(g for g in result_dict["test_groups"] if g["name"] == "HA_DB_HANA") + for test_case in ha_db_group["test_cases"]: + assert test_case["enabled"] is True + ha_scs_group = next(g for g in result_dict["test_groups"] if g["name"] == "HA_SCS") + for test_case in ha_scs_group["test_cases"]: + assert test_case["enabled"] is False + finally: + os.unlink(temp_yaml_file) + + def test_filter_tests_by_cases(self, temp_yaml_file): + """ + Test filter_tests with specific test cases. 
+ + :param temp_yaml_file: Path to temporary YAML file + :type temp_yaml_file: str + """ + try: + filter_obj = TestFilter(temp_yaml_file) + result = filter_obj.filter_tests(test_cases=["ha-config", "ascs-node-crash"]) + result_dict = json.loads(result) + for group in result_dict["test_groups"]: + for test_case in group["test_cases"]: + if test_case["task_name"] in ["ha-config", "ascs-node-crash"]: + assert test_case["enabled"] is True + else: + assert test_case["enabled"] is False + finally: + os.unlink(temp_yaml_file) + + def test_filter_tests_by_group_and_cases(self, temp_yaml_file): + """ + Test filter_tests with both test group and specific test cases. + + :param temp_yaml_file: Path to temporary YAML file + :type temp_yaml_file: str + """ + try: + filter_obj = TestFilter(temp_yaml_file) + result = filter_obj.filter_tests( + test_group="HA_DB_HANA", test_cases=["ha-config", "azure-lb"] + ) + result_dict = json.loads(result) + ha_db_group = next(g for g in result_dict["test_groups"] if g["name"] == "HA_DB_HANA") + assert len(ha_db_group["test_cases"]) == 2 + expected_tasks = {"ha-config", "azure-lb"} + actual_tasks = {tc["task_name"] for tc in ha_db_group["test_cases"]} + assert actual_tasks == expected_tasks + for test_case in ha_db_group["test_cases"]: + assert test_case["enabled"] is True + finally: + os.unlink(temp_yaml_file) + + def test_get_ansible_vars_no_filters(self, temp_yaml_file, sample_config): + """ + Test get_ansible_vars with no filters applied. + + :param temp_yaml_file: Path to temporary YAML file + :type temp_yaml_file: str + :param sample_config: Expected configuration data + :type sample_config: dict + """ + try: + filter_obj = TestFilter(temp_yaml_file) + result = filter_obj.get_ansible_vars() + result_dict = json.loads(result) + assert "test_groups" in result_dict + assert result_dict["test_groups"] == sample_config["test_groups"] + finally: + os.unlink(temp_yaml_file) + + def test_get_ansible_vars_with_filters(self, temp_yaml_file): + """ + Test get_ansible_vars with filters applied. + + :param temp_yaml_file: Path to temporary YAML file + :type temp_yaml_file: str + """ + try: + filter_obj = TestFilter(temp_yaml_file) + result = filter_obj.get_ansible_vars(test_group="HA_SCS") + result_dict = json.loads(result) + assert "test_groups" in result_dict + ha_scs_group = next(g for g in result_dict["test_groups"] if g["name"] == "HA_SCS") + for test_case in ha_scs_group["test_cases"]: + assert test_case["enabled"] is True + ha_db_group = next(g for g in result_dict["test_groups"] if g["name"] == "HA_DB_HANA") + for test_case in ha_db_group["test_cases"]: + assert test_case["enabled"] is False + finally: + os.unlink(temp_yaml_file) + + def test_main_function_insufficient_args(self, monkeypatch, capsys): + """ + Test main function with insufficient arguments. + + :param monkeypatch: Pytest monkeypatch fixture + :type monkeypatch: pytest.MonkeyPatch + :param capsys: Pytest fixture to capture stdout/stderr + :type capsys: pytest.CaptureFixture + """ + with monkeypatch.context() as m: + m.setattr("sys.argv", ["filter_tests.py"]) + with pytest.raises(SystemExit) as exc_info: + from src.module_utils.filter_tests import main + + main() + assert exc_info.value.code == 1 + captured = capsys.readouterr() + assert "Usage: python filter_tests.py" in captured.err + + def test_main_function_with_input_file_only(self, monkeypatch, temp_yaml_file, capsys): + """ + Test main function with only input file argument. 
+ + :param monkeypatch: Pytest monkeypatch fixture + :type monkeypatch: pytest.MonkeyPatch + :param temp_yaml_file: Path to temporary YAML file + :type temp_yaml_file: str + :param capsys: Pytest fixture to capture stdout/stderr + :type capsys: pytest.CaptureFixture + """ + try: + with monkeypatch.context() as m: + m.setattr("sys.argv", ["filter_tests.py", temp_yaml_file]) + from src.module_utils.filter_tests import main + + main() + captured = capsys.readouterr() + result = json.loads(captured.out) + assert "test_groups" in result + finally: + os.unlink(temp_yaml_file) + + def test_main_function_with_test_group(self, monkeypatch, temp_yaml_file, capsys): + """ + Test main function with test group specified. + + :param monkeypatch: Pytest monkeypatch fixture + :type monkeypatch: pytest.MonkeyPatch + :param temp_yaml_file: Path to temporary YAML file + :type temp_yaml_file: str + :param capsys: Pytest fixture to capture stdout/stderr + :type capsys: pytest.CaptureFixture + """ + try: + with monkeypatch.context() as m: + m.setattr("sys.argv", ["filter_tests.py", temp_yaml_file, "HA_DB_HANA"]) + from src.module_utils.filter_tests import main + + main() + captured = capsys.readouterr() + result = json.loads(captured.out) + assert "test_groups" in result + finally: + os.unlink(temp_yaml_file) + + def test_main_function_with_test_cases(self, monkeypatch, temp_yaml_file, capsys): + """ + Test main function with test cases specified. + + :param monkeypatch: Pytest monkeypatch fixture + :type monkeypatch: pytest.MonkeyPatch + :param temp_yaml_file: Path to temporary YAML file + :type temp_yaml_file: str + :param capsys: Pytest fixture to capture stdout/stderr + :type capsys: pytest.CaptureFixture + """ + try: + with monkeypatch.context() as m: + m.setattr( + "sys.argv", ["filter_tests.py", temp_yaml_file, "null", "ha-config,azure-lb"] + ) + from src.module_utils.filter_tests import main + + main() + captured = capsys.readouterr() + result = json.loads(captured.out) + assert "test_groups" in result + finally: + os.unlink(temp_yaml_file) + + def test_main_function_with_null_values(self, monkeypatch, temp_yaml_file, capsys): + """ + Test main function with null values. + + :param monkeypatch: Pytest monkeypatch fixture + :type monkeypatch: pytest.MonkeyPatch + :param temp_yaml_file: Path to temporary YAML file + :type temp_yaml_file: str + :param capsys: Pytest fixture to capture stdout/stderr + :type capsys: pytest.CaptureFixture + """ + try: + with monkeypatch.context() as m: + m.setattr("sys.argv", ["filter_tests.py", temp_yaml_file, "null", "null"]) + from src.module_utils.filter_tests import main + + main() + captured = capsys.readouterr() + result = json.loads(captured.out) + assert "test_groups" in result + finally: + os.unlink(temp_yaml_file) + + def test_filter_tests_nonexistent_group(self, temp_yaml_file, sample_config): + """ + Test filter_tests with a non-existent test group. + + :param temp_yaml_file: Path to temporary YAML file + :type temp_yaml_file: str + :param sample_config: Sample configuration data + :type sample_config: dict + """ + try: + filter_obj = TestFilter(temp_yaml_file) + result = filter_obj.filter_tests(test_group="NONEXISTENT_GROUP") + result_dict = json.loads(result) + for group in result_dict["test_groups"]: + for test_case in group["test_cases"]: + assert test_case["enabled"] is False + finally: + os.unlink(temp_yaml_file) + + def test_filter_tests_nonexistent_cases(self, temp_yaml_file): + """ + Test filter_tests with non-existent test cases. 
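+        Expects every test case in every group to end up disabled.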
+ + :param temp_yaml_file: Path to temporary YAML file + :type temp_yaml_file: str + """ + try: + filter_obj = TestFilter(temp_yaml_file) + result = filter_obj.filter_tests(test_cases=["nonexistent-case"]) + result_dict = json.loads(result) + for group in result_dict["test_groups"]: + for test_case in group["test_cases"]: + assert test_case["enabled"] is False + finally: + os.unlink(temp_yaml_file) + + def test_config_copy_independence(self, temp_yaml_file): + """ + Test that filtered configuration doesn't modify the original. + + :param temp_yaml_file: Path to temporary YAML file + :type temp_yaml_file: str + """ + try: + filter_obj = TestFilter(temp_yaml_file) + original_config = filter_obj.config.copy() + filter_obj.filter_tests(test_group="HA_DB_HANA") + assert filter_obj.config == original_config + finally: + os.unlink(temp_yaml_file) diff --git a/tests/module_utils/get_cluster_status_test.py b/tests/module_utils/get_cluster_status_test.py index 98c5eb6e..e601ddfa 100644 --- a/tests/module_utils/get_cluster_status_test.py +++ b/tests/module_utils/get_cluster_status_test.py @@ -5,10 +5,12 @@ Unit tests for the get_cluster_status module. """ +import logging import xml.etree.ElementTree as ET from typing import Dict, Any import pytest from src.module_utils.get_cluster_status import BaseClusterStatusChecker +from src.module_utils.enums import OperatingSystemFamily class TestableBaseClusterChecker(BaseClusterStatusChecker): @@ -21,12 +23,12 @@ def __init__(self, ansible_os_family=""): self.test_ready = False self.test_stable = False - def _process_node_attributes(self, node_attributes: ET.Element) -> Dict[str, Any]: + def _process_node_attributes(self, cluster_status_xml: ET.Element) -> Dict[str, Any]: """ Process node attributes and return a dictionary with node information. - :param node_attributes: XML element containing node attributes. - :type node_attributes: ET.Element + :param cluster_status_xml: XML element containing cluster status. + :type cluster_status_xml: ET.Element :return: Dictionary with node information. :rtype: Dict[str, Any] """ @@ -64,9 +66,9 @@ def base_checker(self): :return: Instance of TestableBaseClusterChecker. :rtype: TestableBaseClusterChecker """ - return TestableBaseClusterChecker(ansible_os_family="REDHAT") + return TestableBaseClusterChecker(ansible_os_family=OperatingSystemFamily.REDHAT) - def test_get_stonith_action_rhel94(self, mocker, base_checker): + def test_get_stonith_action_rhel94(self, mocker, base_checker: TestableBaseClusterChecker): """ Test the _get_stonith_action method when the command executes successfully. @@ -88,7 +90,7 @@ def test_get_stonith_action_rhel94(self, mocker, base_checker): mock_execute.assert_called_once() assert base_checker.result["stonith_action"] == return_value - def test_get_stonith_action(self, mocker, base_checker): + def test_get_stonith_action(self, mocker, base_checker: TestableBaseClusterChecker): """ Test the _get_stonith_action method when the command executes successfully. @@ -110,7 +112,7 @@ def test_get_stonith_action(self, mocker, base_checker): mock_execute.assert_called_once() assert base_checker.result["stonith_action"] == return_value - def test_get_stonith_action_exception(self, mocker, base_checker): + def test_get_stonith_action_exception(self, mocker, base_checker: TestableBaseClusterChecker): """ Test the _get_stonith_action method when the command raises an exception. 
@@ -128,7 +130,9 @@ def test_get_stonith_action_exception(self, mocker, base_checker): mock_execute.assert_called_once() assert base_checker.result["stonith_action"] == "unknown" - def test_validate_cluster_basic_status_success(self, mocker, base_checker): + def test_validate_cluster_basic_status_success( + self, mocker, base_checker: TestableBaseClusterChecker + ): """ Test _validate_cluster_basic_status method with a successful cluster status. @@ -156,7 +160,9 @@ def test_validate_cluster_basic_status_success(self, mocker, base_checker): assert base_checker.result["pacemaker_status"] == "running" - def test_validate_cluster_basic_status_insufficient_nodes(self, mocker, base_checker): + def test_validate_cluster_basic_status_insufficient_nodes( + self, mocker, base_checker: TestableBaseClusterChecker + ): """ Test _validate_cluster_basic_status method with insufficient nodes. @@ -183,7 +189,9 @@ def test_validate_cluster_basic_status_insufficient_nodes(self, mocker, base_che assert "insufficient nodes" in base_checker.result["message"] - def test_validate_cluster_basic_status_offline_node(self, base_checker): + def test_validate_cluster_basic_status_offline_node( + self, base_checker: TestableBaseClusterChecker + ): """ Test _validate_cluster_basic_status method with an offline node. @@ -208,7 +216,7 @@ def test_validate_cluster_basic_status_offline_node(self, base_checker): assert "node2 is not online" in base_checker.result["message"] - def test_run_cluster_ready(self, mocker, base_checker): + def test_run_cluster_ready(self, mocker, base_checker: TestableBaseClusterChecker): """ Test the run method when the cluster is ready. @@ -245,3 +253,152 @@ def test_run_cluster_ready(self, mocker, base_checker): assert result["status"] == "PASSED" assert "end" in result + + def test_run_cluster_unstable(self, mocker, base_checker: TestableBaseClusterChecker): + """ + Test the run method when cluster is ready but not stable. + + :param mocker: Mocking library to patch methods. + :type mocker: mocker.MockerFixture + :param base_checker: Instance of TestableBaseClusterChecker. + :type base_checker: TestableBaseClusterChecker + """ + mocker.patch.object(base_checker, "execute_command_subprocess", return_value="reboot") + + base_checker.test_ready = True + base_checker.test_stable = False # Cluster is not stable + + result = base_checker.run() + + assert result["status"] == "PASSED" + assert "Pacemaker cluster isn't stable" in result["message"] + + def test_run_cluster_not_ready_initially( + self, mocker, base_checker: TestableBaseClusterChecker + ): + """ + Test the run method when cluster is not ready initially but becomes ready. + + :param mocker: Mocking library to patch methods. + :type mocker: mocker.MockerFixture + :param base_checker: Instance of TestableBaseClusterChecker. + :type base_checker: TestableBaseClusterChecker + """ + mock_execute = mocker.patch.object(base_checker, "execute_command_subprocess") + mock_execute.side_effect = [ + "reboot", + """ + + + + + + + + + + + + + """, + "active", + ] + + base_checker.test_ready = False + base_checker.test_stable = True + base_checker.max_ready_calls = 2 + + result = base_checker.run() + + assert result["status"] == "PASSED" + assert "end" in result + + def test_run_cluster_ready_immediately(self, mocker, base_checker: TestableBaseClusterChecker): + """ + Test the run method when the cluster is ready immediately. + + :param mocker: Mocking library to patch methods. 
+ :type mocker: mocker.MockerFixture + :param base_checker: Instance of TestableBaseClusterChecker. + :type base_checker: TestableBaseClusterChecker + """ + mock_execute = mocker.patch.object( + base_checker, "execute_command_subprocess", return_value="reboot" + ) + + base_checker.test_ready = True + base_checker.test_stable = True + + result = base_checker.run() + + assert result["status"] == "PASSED" + assert "end" in result + assert mock_execute.call_count == 1 + + def test_run_method_exception_in_try_block( + self, mocker, base_checker: TestableBaseClusterChecker + ): + """ + Test run method when exception occurs in try block. + + :param mocker: Mocking library to patch methods. + :type mocker: mocker.MockerFixture + :param base_checker: Instance of TestableBaseClusterChecker. + :type base_checker: TestableBaseClusterChecker + """ + mocker.patch.object( + base_checker, "execute_command_subprocess", side_effect=Exception("Test exception") + ) + mock_handle_error = mocker.patch.object(base_checker, "handle_error") + mock_log = mocker.patch.object(base_checker, "log") + + result = base_checker.run() + mock_handle_error.assert_called_once() + + mock_log.assert_any_call(logging.INFO, "Starting cluster status check") + mock_log.assert_any_call(logging.INFO, "Cluster status check completed") + assert result["status"] == "PASSED" + assert "end" in result + + def test_run_method_while_loop_multiple_iterations( + self, mocker, base_checker: TestableBaseClusterChecker + ): + """ + Test run method with multiple while loop iterations. + + :param mocker: Mocking library to patch methods. + :type mocker: mocker.MockerFixture + :param base_checker: Instance of TestableBaseClusterChecker. + :type base_checker: TestableBaseClusterChecker + """ + cluster_xml = """ + + + + + + + + + + + + + """ + + mock_execute = mocker.patch.object(base_checker, "execute_command_subprocess") + mock_execute.side_effect = [ + "reboot", + cluster_xml, + "active", + cluster_xml, + "active", + ] + + base_checker.test_ready = False + base_checker.max_ready_calls = 3 + base_checker.test_stable = True + + result = base_checker.run() + + assert result["status"] == "PASSED" diff --git a/tests/module_utils/sap_automation_qa_test.py b/tests/module_utils/sap_automation_qa_test.py index f1f61d88..3672fcfe 100644 --- a/tests/module_utils/sap_automation_qa_test.py +++ b/tests/module_utils/sap_automation_qa_test.py @@ -6,7 +6,8 @@ """ import xml.etree.ElementTree as ET -from src.module_utils.sap_automation_qa import SapAutomationQA, TestStatus +from src.module_utils.sap_automation_qa import SapAutomationQA +from src.module_utils.enums import TestStatus class MockLogger: @@ -66,7 +67,6 @@ def test_init(self): Test the initialization of the SapAutomationQA class. 
""" sap_qa = SapAutomationQA() - assert sap_qa.result["status"] == TestStatus.NOT_STARTED.value assert sap_qa.result["message"] == "" assert not sap_qa.result["details"] assert not sap_qa.result["logs"] diff --git a/tests/modules/check_indexserver_test.py b/tests/modules/check_indexserver_test.py index 290e4ffd..a3863022 100644 --- a/tests/modules/check_indexserver_test.py +++ b/tests/modules/check_indexserver_test.py @@ -7,7 +7,7 @@ import io from src.modules.check_indexserver import IndexServerCheck, main -from src.module_utils.sap_automation_qa import TestStatus +from src.module_utils.enums import OperatingSystemFamily, TestStatus def fake_open_factory(file_content): @@ -54,7 +54,9 @@ def test_redhat_indexserver_success(self, monkeypatch): ] with monkeypatch.context() as monkey_patch: monkey_patch.setattr("builtins.open", fake_open_factory(file_lines)) - checker = IndexServerCheck(database_sid="TEST", os_distribution="redhat") + checker = IndexServerCheck( + database_sid="TEST", os_distribution=OperatingSystemFamily.REDHAT + ) checker.check_indexserver() result = checker.get_result() @@ -77,9 +79,17 @@ def test_suse_indexserver_success(self, monkeypatch): "path=/usr/share/SAPHanaSR", "dummy=dummy", ] + file_lines_angi = [ + "[ha_dr_provider_suschksrv]", + "provider=susChkSrv", + "path=/usr/share/SAPHanaSR", + "dummy=dummy", + ] with monkeypatch.context() as monkey_patch: monkey_patch.setattr("builtins.open", fake_open_factory(file_lines)) - checker = IndexServerCheck(database_sid="TEST", os_distribution="suse") + checker = IndexServerCheck( + database_sid="TEST", os_distribution=OperatingSystemFamily.SUSE + ) checker.check_indexserver() result = checker.get_result() @@ -89,12 +99,17 @@ def test_suse_indexserver_success(self, monkeypatch): assert "provider" in result["details"] assert "path" in result["details"] + monkey_patch.setattr("builtins.open", fake_open_factory(file_lines_angi)) + checker.check_indexserver() + result = checker.get_result() + assert result["status"] == TestStatus.SUCCESS.value + def test_unsupported_os(self): """ Test unsupported OS distribution. 
""" with io.StringIO() as _: - checker = IndexServerCheck(database_sid="TEST", os_distribution="windows") + checker = IndexServerCheck(database_sid="TEST", os_distribution="unsupported_os") checker.check_indexserver() result = checker.get_result() @@ -117,7 +132,9 @@ def test_indexserver_not_configured(self, monkeypatch): ] with monkeypatch.context() as monkey_patch: monkey_patch.setattr("builtins.open", fake_open_factory(file_lines)) - index_server_check = IndexServerCheck(database_sid="HDB", os_distribution="redhat") + index_server_check = IndexServerCheck( + database_sid="HDB", os_distribution=OperatingSystemFamily.REDHAT + ) index_server_check.check_indexserver() result = index_server_check.get_result() @@ -143,7 +160,9 @@ def fake_open(*args, **kwargs): with monkeypatch.context() as monkey_patch: monkey_patch.setattr("builtins.open", fake_open) - index_server_check = IndexServerCheck(database_sid="HDB", os_distribution="redhat") + index_server_check = IndexServerCheck( + database_sid="HDB", os_distribution=OperatingSystemFamily.REDHAT + ) index_server_check.check_indexserver() result = index_server_check.get_result() @@ -174,7 +193,6 @@ class MockAnsibleModule: def __init__(self, *args, **kwargs): self.params = { "database_sid": "TEST", - "ansible_os_family": "redhat", } def exit_json(self, **kwargs): @@ -184,8 +202,22 @@ def exit_json(self, **kwargs): nonlocal mock_result mock_result = kwargs + def mock_ansible_facts_suse(module): + """ + Mock function to return Ansible facts for Suse. + + :param module: Mock Ansible module instance. + :type module: MockAnsibleModule + :return: Dictionary with Suse facts. + :rtype: dict + """ + return {"os_family": "Suse", "distribution": "SLES", "ansible_os_family": "Suse"} + with monkeypatch.context() as monkey_patch: monkey_patch.setattr("src.modules.check_indexserver.AnsibleModule", MockAnsibleModule) monkey_patch.setattr("builtins.open", fake_open_factory(file_lines)) + monkey_patch.setattr( + "src.modules.check_indexserver.ansible_facts", mock_ansible_facts_suse + ) main() assert mock_result["status"] == TestStatus.ERROR.value diff --git a/tests/modules/get_cluster_status_db_test.py b/tests/modules/get_cluster_status_db_test.py index 42f36490..86340430 100644 --- a/tests/modules/get_cluster_status_db_test.py +++ b/tests/modules/get_cluster_status_db_test.py @@ -7,7 +7,11 @@ import xml.etree.ElementTree as ET import pytest -from src.modules.get_cluster_status_db import HanaClusterStatusChecker, run_module +from src.modules.get_cluster_status_db import ( + HanaClusterStatusChecker, + run_module, +) +from src.module_utils.enums import OperatingSystemFamily, HanaSRProvider class TestHanaClusterStatusChecker: @@ -16,58 +20,78 @@ class TestHanaClusterStatusChecker: """ @pytest.fixture - def hana_checker(self): + def hana_checker_classic(self): """ - Fixture for creating a HanaClusterStatusChecker instance. + Fixture for creating a HanaClusterStatusChecker instance with classic SAP HANA SR provider. :return: Instance of HanaClusterStatusChecker. :rtype: HanaClusterStatusChecker """ - return HanaClusterStatusChecker(database_sid="TEST", ansible_os_family="REDHAT") + return HanaClusterStatusChecker( + database_sid="TEST", + ansible_os_family=OperatingSystemFamily.REDHAT, + saphanasr_provider=HanaSRProvider.SAPHANASR, + db_instance_number="00", + ) - def test_get_automation_register(self, mocker, hana_checker): + @pytest.fixture + def hana_checker_angi(self): + """ + Fixture for creating a HanaClusterStatusChecker instance with ANGI SAP HANA SR provider. 
+ + :return: Instance of HanaClusterStatusChecker. + :rtype: HanaClusterStatusChecker + """ + return HanaClusterStatusChecker( + database_sid="TEST", + ansible_os_family=OperatingSystemFamily.SUSE, + saphanasr_provider=HanaSRProvider.ANGI, + db_instance_number="00", + ) + + def test_get_automation_register(self, mocker, hana_checker_classic): """ Test the _get_automation_register method. :param mocker: Mocking library for Python. :type mocker: _mocker.MagicMock - :param hana_checker: Instance of HanaClusterStatusChecker. - :type hana_checker: HanaClusterStatusChecker + :param hana_checker_classic: Instance of HanaClusterStatusChecker. + :type hana_checker_classic: HanaClusterStatusChecker """ mocker.patch.object( - hana_checker, + hana_checker_classic, "execute_command_subprocess", return_value='', ) - hana_checker._get_automation_register() + hana_checker_classic._get_automation_register() - assert hana_checker.result["AUTOMATED_REGISTER"] == "true" + assert hana_checker_classic.result["AUTOMATED_REGISTER"] == "true" - def test_get_automation_register_exception(self, mocker, hana_checker): + def test_get_automation_register_exception(self, mocker, hana_checker_classic): """ Test the _get_automation_register method when an exception occurs. :param mocker: Mocking library for Python. :type mocker: _mocker.MagicMock - :param hana_checker: Instance of HanaClusterStatusChecker. - :type hana_checker: HanaClusterStatusChecker + :param hana_checker_classic: Instance of HanaClusterStatusChecker. + :type hana_checker_classic: HanaClusterStatusChecker """ mocker.patch.object( - hana_checker, "execute_command_subprocess", side_effect=Exception("Test error") + hana_checker_classic, "execute_command_subprocess", side_effect=Exception("Test error") ) - hana_checker._get_automation_register() + hana_checker_classic._get_automation_register() - assert hana_checker.result["AUTOMATED_REGISTER"] == "unknown" + assert hana_checker_classic.result["AUTOMATED_REGISTER"] == "unknown" - def test_process_node_attributes_primary_only(self, hana_checker): + def test_process_node_attributes_primary_only(self, hana_checker_classic): """ Test processing node attributes with only the primary node. - :param hana_checker: Instance of HanaClusterStatusChecker. - :type hana_checker: HanaClusterStatusChecker + :param hana_checker_classic: Instance of HanaClusterStatusChecker. + :type hana_checker_classic: HanaClusterStatusChecker """ xml_str = """ @@ -84,7 +108,7 @@ def test_process_node_attributes_primary_only(self, hana_checker): """ - result = hana_checker._process_node_attributes(ET.fromstring(xml_str)) + result = hana_checker_classic._process_node_attributes(ET.fromstring(xml_str)) assert result["primary_node"] == "node1" assert result["secondary_node"] == "" @@ -92,12 +116,73 @@ def test_process_node_attributes_primary_only(self, hana_checker): assert result["replication_mode"] == "syncmem" assert result["primary_site_name"] == "site1" - def test_process_node_attributes_both_nodes(self, hana_checker): + def test_process_node_attributes_primary_only_angi(self, hana_checker_angi): + """ + Test processing node attributes with only the primary node when using ANGI provider. + + :param hana_checker_angi: Instance of HanaClusterStatusChecker. 
+ :type hana_checker_angi: HanaClusterStatusChecker + """ + + xml_str = """ + + + + + + + + + + + + """ + + result = hana_checker_angi._process_node_attributes(ET.fromstring(xml_str)) + + assert result["primary_node"] == "node1" + assert result["secondary_node"] == "" + assert result["primary_site_name"] == "SITEA" + + def test_process_node_attributes_both_nodes_angi(self, hana_checker_angi): """ Test processing node attributes with both primary and secondary nodes. - :param hana_checker: Instance of HanaClusterStatusChecker. - :type hana_checker: HanaClusterStatusChecker + :param hana_checker_angi: Instance of HanaClusterStatusChecker. + :type hana_checker_angi: HanaClusterStatusChecker + """ + xml_str = """ + + + + + + + + + + + + + + + + + + + """ + result = hana_checker_angi._process_node_attributes(ET.fromstring(xml_str)) + + assert result["primary_node"] == "node1" + assert result["secondary_node"] == "node2" + assert result["primary_site_name"] == "SITEA" + + def test_process_node_attributes_both_nodes(self, hana_checker_classic): + """ + Test processing node attributes with both primary and secondary nodes. + + :param hana_checker_classic: Instance of HanaClusterStatusChecker. + :type hana_checker_classic: HanaClusterStatusChecker """ xml_str = """ @@ -117,7 +202,7 @@ def test_process_node_attributes_both_nodes(self, hana_checker): """ - result = hana_checker._process_node_attributes(ET.fromstring(xml_str)) + result = hana_checker_classic._process_node_attributes(ET.fromstring(xml_str)) assert result["primary_node"] == "node1" assert result["secondary_node"] == "node2" @@ -125,54 +210,54 @@ def test_process_node_attributes_both_nodes(self, hana_checker): assert result["replication_mode"] == "syncmem" assert result["primary_site_name"] == "site1" - def test_is_cluster_ready(self, hana_checker): + def test_is_cluster_ready(self, hana_checker_classic): """ Test the _is_cluster_ready method. - :param hana_checker: Instance of HanaClusterStatusChecker. - :type hana_checker: HanaClusterStatusChecker + :param hana_checker_classic: Instance of HanaClusterStatusChecker. + :type hana_checker_classic: HanaClusterStatusChecker """ - hana_checker.result["primary_node"] = "" - assert not hana_checker._is_cluster_ready() + hana_checker_classic.result["primary_node"] = "" + assert not hana_checker_classic._is_cluster_ready() - hana_checker.result["primary_node"] = "node1" - assert hana_checker._is_cluster_ready() + hana_checker_classic.result["primary_node"] = "node1" + assert hana_checker_classic._is_cluster_ready() - def test_is_cluster_stable(self, hana_checker): + def test_is_cluster_stable(self, hana_checker_classic): """ Test the _is_cluster_stable method. - :param hana_checker: Instance of HanaClusterStatusChecker. - :type hana_checker: HanaClusterStatusChecker + :param hana_checker_classic: Instance of HanaClusterStatusChecker. 
+ :type hana_checker_classic: HanaClusterStatusChecker """ - hana_checker.result["primary_node"] = "" - hana_checker.result["secondary_node"] = "" - assert not hana_checker._is_cluster_stable() + hana_checker_classic.result["primary_node"] = "" + hana_checker_classic.result["secondary_node"] = "" + assert not hana_checker_classic._is_cluster_stable() - hana_checker.result["primary_node"] = "node1" - hana_checker.result["secondary_node"] = "" - assert not hana_checker._is_cluster_stable() + hana_checker_classic.result["primary_node"] = "node1" + hana_checker_classic.result["secondary_node"] = "" + assert not hana_checker_classic._is_cluster_stable() - hana_checker.result["primary_node"] = "node1" - hana_checker.result["secondary_node"] = "node2" - assert hana_checker._is_cluster_stable() + hana_checker_classic.result["primary_node"] = "node1" + hana_checker_classic.result["secondary_node"] = "node2" + assert hana_checker_classic._is_cluster_stable() - def test_run(self, mocker, hana_checker): + def test_run(self, mocker, hana_checker_classic): """ Test the run method of the HanaClusterStatusChecker class. :param mocker: Mocking library for Python. :type mocker: _mocker.MagicMock - :param hana_checker: Instance of HanaClusterStatusChecker. - :type hana_checker: HanaClusterStatusChecker + :param hana_checker_classic: Instance of HanaClusterStatusChecker. + :type hana_checker_classic: HanaClusterStatusChecker """ mock_super_run = mocker.patch( "src.module_utils.get_cluster_status.BaseClusterStatusChecker.run", return_value={"status": "PASSED"}, ) - mock_get_automation = mocker.patch.object(hana_checker, "_get_automation_register") + mock_get_automation = mocker.patch.object(hana_checker_classic, "_get_automation_register") - result = hana_checker.run() + result = hana_checker_classic.run() mock_super_run.assert_called_once() mock_get_automation.assert_called_once() @@ -194,9 +279,14 @@ def test_run_module(self, mocker): mock_ansible_module = mocker.MagicMock() mock_ansible_module.params = { "database_sid": "TEST", - "ansible_os_family": "REDHAT", "operation_step": "check", + "saphanasr_provider": "SAPHanaSR", + "db_instance_number": "00", } + mocker.patch( + "src.modules.get_cluster_status_db.ansible_facts", return_value={"os_family": "REDHAT"} + ) + mocker.patch( "src.modules.get_cluster_status_db.AnsibleModule", return_value=mock_ansible_module ) diff --git a/tests/modules/get_cluster_status_scs_test.py b/tests/modules/get_cluster_status_scs_test.py index 1fe334d3..e72bf527 100644 --- a/tests/modules/get_cluster_status_scs_test.py +++ b/tests/modules/get_cluster_status_scs_test.py @@ -186,6 +186,9 @@ def test_run_module(self, mocker): mocker.patch( "src.modules.get_cluster_status_scs.AnsibleModule", return_value=mock_ansible_module ) + mocker.patch( + "src.modules.get_cluster_status_scs.ansible_facts", return_value={"os_family": "REDHAT"} + ) mock_run = mocker.MagicMock() mock_checker = mocker.MagicMock() diff --git a/tests/modules/get_pcmk_properties_db_test.py b/tests/modules/get_pcmk_properties_db_test.py index 58b11860..9d7d5664 100644 --- a/tests/modules/get_pcmk_properties_db_test.py +++ b/tests/modules/get_pcmk_properties_db_test.py @@ -6,8 +6,10 @@ """ import io +import xml.etree.ElementTree as ET import pytest from src.modules.get_pcmk_properties_db import HAClusterValidator, main +from src.module_utils.enums import OperatingSystemFamily, HanaSRProvider, TestStatus DUMMY_XML_RSC = """ @@ -88,8 +90,8 @@ DUMMY_GLOBAL_INI = """[DEFAULT] dumm1 = dummy2 -[ha_dr_provider_SAPHanaSR] 
-provider = SAPHanaSR +[ha_dr_provider_sushanasr] +provider = SAPHanaSR-angi """ DUMMY_CONSTANTS = { @@ -126,7 +128,7 @@ "OS_PARAMETERS": { "DEFAULTS": {"sysctl": {"kernel.numa_balancing": "kernel.numa_balancing = 0"}} }, - "GLOBAL_INI": {"REDHAT": {"provider": "SAPHanaSR"}}, + "GLOBAL_INI": {"REDHAT": {"provider": "SAPHanaSR"}, "SUSE": {"provider": "SAPHanaSR-angi"}}, "CONSTRAINTS": {"rsc_location": {"score": "INFINITY"}}, } @@ -198,7 +200,7 @@ def mock_execute_command(*args, **kwargs): :return: Mocked command output. :rtype: str """ - command = args[1] if len(args) > 1 else kwargs.get("command") + command = str(args[1]) if len(args) > 1 else str(kwargs.get("command")) if "sysctl" in command: return DUMMY_OS_COMMAND return mock_xml_outputs.get(command[-1], "") @@ -209,15 +211,268 @@ def mock_execute_command(*args, **kwargs): ) monkeypatch.setattr("builtins.open", fake_open_factory(DUMMY_GLOBAL_INI)) return HAClusterValidator( - os_type="REDHAT", + os_type=OperatingSystemFamily.REDHAT, os_version="9.2", sid="PRD", instance_number="00", fencing_mechanism="AFA", virtual_machine_name="vmname", constants=DUMMY_CONSTANTS, + saphanasr_provider=HanaSRProvider.SAPHANASR, ) + @pytest.fixture + def validator_angi(self, monkeypatch, mock_xml_outputs): + """ + Fixture for creating a HAClusterValidator instance. + + :param monkeypatch: Monkeypatch fixture for mocking. + :type monkeypatch: pytest.MonkeyPatch + :param mock_xml_outputs: Mock XML outputs. + :type mock_xml_outputs: dict + :return: HAClusterValidator instance. + :rtype: HAClusterValidator + """ + + def mock_execute_command(*args, **kwargs): + """ + Mock function to replace execute_command_subprocess. + + :param *args: Positional arguments. + :param **kwargs: Keyword arguments. + :return: Mocked command output. + :rtype: str + """ + command = str(args[1]) if len(args) > 1 else str(kwargs.get("command")) + if "sysctl" in command: + return DUMMY_OS_COMMAND + return mock_xml_outputs.get(command[-1], "") + + monkeypatch.setattr( + "src.module_utils.sap_automation_qa.SapAutomationQA.execute_command_subprocess", + mock_execute_command, + ) + monkeypatch.setattr("builtins.open", fake_open_factory(DUMMY_GLOBAL_INI)) + return HAClusterValidator( + os_type=OperatingSystemFamily.SUSE, + os_version="9.2", + sid="PRD", + instance_number="00", + fencing_mechanism="AFA", + virtual_machine_name="vmname", + constants=DUMMY_CONSTANTS, + saphanasr_provider=HanaSRProvider.ANGI, + ) + + def test_get_expected_value_fence_config(self, validator): + """ + Test _get_expected_value method with fence configuration. + """ + validator.fencing_mechanism = "azure-fence-agent" + expected = validator._get_expected_value("crm_config", "priority") + assert expected == "10" + + def test_get_expected_value_os_config(self, validator): + """ + Test _get_expected_value method with OS configuration. + """ + expected = validator._get_expected_value("crm_config", "stonith-enabled") + assert expected == "true" + + def test_get_expected_value_defaults(self, validator): + """ + Test _get_expected_value method with defaults. + """ + expected = validator._get_expected_value("crm_config", "unknown-param") + assert expected is None + + def test_get_resource_expected_value_meta_attributes(self, validator): + """ + Test _get_resource_expected_value method for meta_attributes section. 
+ """ + expected = validator._get_resource_expected_value( + "fence_agent", "meta_attributes", "pcmk_delay_max" + ) + assert expected == "15" + + def test_get_resource_expected_value_operations(self, validator): + """ + Test _get_resource_expected_value method for operations section. + """ + expected = validator._get_resource_expected_value( + "fence_agent", "operations", "timeout", "monitor" + ) + assert expected == ["700", "700s"] + + def test_get_resource_expected_value_unknown_section(self, validator): + """ + Test _get_resource_expected_value method for unknown section. + """ + expected = validator._get_resource_expected_value("fence_agent", "unknown_section", "param") + assert expected is None + + def test_create_parameter_with_empty_value(self, validator): + """ + Test _create_parameter method when value is empty. + """ + param = validator._create_parameter( + category="test_category", name="test_param", value="", expected_value="expected" + ) + assert param["status"] == TestStatus.INFO.value + + def test_create_parameter_with_list_expected_value_success(self, validator): + """ + Test _create_parameter method with list expected value - success case. + """ + param = validator._create_parameter( + category="test_category", + name="test_param", + value="value1", + expected_value=["value1", "value2"], + ) + assert param["status"] == TestStatus.SUCCESS.value + assert param["expected_value"] == "value1" + + def test_create_parameter_with_list_expected_value_error(self, validator): + """ + Test _create_parameter method with list expected value - error case. + """ + param = validator._create_parameter( + category="test_category", + name="test_param", + value="value3", + expected_value=["value1", "value2"], + ) + assert param["status"] == TestStatus.ERROR.value + + def test_create_parameter_with_string_expected_value_success(self, validator): + """ + Test _create_parameter method with string expected value - success case. + """ + param = validator._create_parameter( + category="test_category", + name="test_param", + value="expected_value", + expected_value="expected_value", + ) + assert param["status"] == TestStatus.SUCCESS.value + + def test_create_parameter_with_string_expected_value_error(self, validator): + """ + Test _create_parameter method with string expected value - error case. + """ + param = validator._create_parameter( + category="test_category", + name="test_param", + value="actual_value", + expected_value="expected_value", + ) + assert param["status"] == TestStatus.ERROR.value + + def test_create_parameter_with_invalid_expected_value_type(self, validator): + """ + Test _create_parameter method with invalid expected value type. + """ + param = validator._create_parameter( + category="test_category", + name="test_param", + value="test_value", + expected_value={"invalid": "type"}, + ) + assert param["status"] == TestStatus.ERROR.value + + def test_create_parameter_with_none_expected_value(self, validator): + """ + Test _create_parameter method when expected_value is None. + """ + param = validator._create_parameter( + category="crm_config", name="test_param", value="test_value", expected_value=None + ) + assert param["status"] == TestStatus.INFO.value + + def test_parse_global_ini_parameters_angi_provider(self, validator_angi): + """ + Test _parse_global_ini_parameters method with ANGI provider. + Covers lines 420-447. 
+ """ + result = validator_angi.get_result() + assert "details" in result + assert "parameters" in result["details"] + + def test_parse_basic_config(self, validator): + """ + Test _parse_basic_config method. + Covers lines 462-473. + """ + xml_str = """ + + + """ + params = validator._parse_basic_config( + ET.fromstring(xml_str), "crm_config", "test_subcategory" + ) + assert len(params) == 2 + assert params[0]["category"] == "crm_config_test_subcategory" + assert params[0]["name"] == "test_param" + assert params[0]["value"] == "test_value" + + def test_parse_resource_hana_meta_and_topology_meta(self, validator): + """ + Test _parse_resource method for hana_meta and topology_meta categories. + Covers lines 486-521. + """ + xml_str = """ + + """ + element = ET.fromstring(xml_str) + params = validator._parse_resource(element, "hana_meta") + assert len(params) > 0 + params = validator._parse_resource(element, "topology_meta") + assert len(params) > 0 + + def test_parse_constraints_with_valid_constraints(self, validator_angi): + """ + Test _parse_constraints method with valid constraints. + Covers lines 532-552. + """ + xml_str = """ + + + + + """ + root = ET.fromstring(xml_str) + params = validator_angi._parse_constraints(root) + constraint_params = [p for p in params if p["category"] == "constraints_rsc_location"] + assert len(constraint_params) >= 1 + + def test_parse_ha_cluster_config_redhat_skip_op_defaults(self, monkeypatch): + """ + Test parse_ha_cluster_config method with REDHAT OS skipping op_defaults. + Covers lines 574-607. + """ + + def mock_execute_command(*args, **kwargs): + return "" + + monkeypatch.setattr( + "src.module_utils.sap_automation_qa.SapAutomationQA.execute_command_subprocess", + mock_execute_command, + ) + monkeypatch.setattr("builtins.open", fake_open_factory(DUMMY_GLOBAL_INI)) + validator = HAClusterValidator( + os_type=OperatingSystemFamily.REDHAT, + os_version="9.2", + sid="PRD", + instance_number="00", + fencing_mechanism="AFA", + virtual_machine_name="vmname", + constants=DUMMY_CONSTANTS, + saphanasr_provider=HanaSRProvider.SAPHANASR, + ) + result = validator.get_result() + assert "details" in result + def test_parse_ha_cluster_config_success(self, validator): """ Test the parse_ha_cluster_config method for successful parsing. 
@@ -246,11 +501,11 @@ def __init__(self, *args, **kwargs): self.params = { "sid": "PRD", "instance_number": "00", - "ansible_os_family": "REDHAT", "virtual_machine_name": "vm_name", "fencing_mechanism": "AFA", "os_version": "9.2", "pcmk_constants": DUMMY_CONSTANTS, + "saphanasr_provider": HanaSRProvider.SAPHANASR.value, } def exit_json(self, **kwargs): diff --git a/tests/modules/get_pcmk_properties_scs_test.py b/tests/modules/get_pcmk_properties_scs_test.py index 4349c5c9..82bbc2df 100644 --- a/tests/modules/get_pcmk_properties_scs_test.py +++ b/tests/modules/get_pcmk_properties_scs_test.py @@ -6,8 +6,10 @@ """ import io +import xml.etree.ElementTree as ET import pytest from src.modules.get_pcmk_properties_scs import HAClusterValidator, main +from src.module_utils.enums import OperatingSystemFamily, TestStatus DUMMY_XML_RSC = """ @@ -115,6 +117,11 @@ "operations": {"monitor": {"timeout": "30"}}, }, "hana": {"meta_attributes": {"clone-max": "2"}}, + "fence_agent": { + "meta_attributes": {"pcmk_delay_max": "15"}, + "operations": {"monitor": {"timeout": ["700", "700s"]}}, + "instance_attributes": {"resourceGroup": "test-rg"}, + }, } }, "OS_PARAMETERS": { @@ -192,7 +199,7 @@ def mock_execute_command(*args, **kwargs): :return: Mocked command output. :rtype: str """ - command = args[1] if len(args) > 1 else kwargs.get("command") + command = str(args[1]) if len(args) > 1 else str(kwargs.get("command")) if "sysctl" in command: return DUMMY_OS_COMMAND return mock_xml_outputs.get(command[-1], "") @@ -203,7 +210,7 @@ def mock_execute_command(*args, **kwargs): ) monkeypatch.setattr("builtins.open", fake_open_factory(DUMMY_GLOBAL_INI)) return HAClusterValidator( - os_type="REDHAT", + os_type=OperatingSystemFamily.REDHAT, sid="PRD", scs_instance_number="00", ers_instance_number="01", @@ -241,7 +248,6 @@ def __init__(self, *args, **kwargs): "sid": "PRD", "ascs_instance_number": "00", "ers_instance_number": "01", - "ansible_os_family": "REDHAT", "virtual_machine_name": "vm_name", "fencing_mechanism": "AFA", "pcmk_constants": DUMMY_CONSTANTS, @@ -251,11 +257,198 @@ def exit_json(self, **kwargs): nonlocal mock_result mock_result = kwargs + def mock_ansible_facts(module): + """ + Mock function to return Ansible facts. + + :param module: Ansible module instance. + :type module: AnsibleModule + :return: Mocked Ansible facts. + :rtype: dict + """ + return {"os_family": "REDHAT"} + monkeypatch.setattr( "src.modules.get_pcmk_properties_scs.AnsibleModule", MockAnsibleModule, ) + monkeypatch.setattr( + "src.modules.get_pcmk_properties_scs.ansible_facts", + mock_ansible_facts, + ) main() assert mock_result["status"] == "PASSED" + + def test_get_expected_value_fence_config(self, validator): + """ + Test _get_expected_value method with fence configuration. + """ + validator.fencing_mechanism = "azure-fence-agent" + expected = validator._get_expected_value("crm_config", "priority") + assert expected == "10" + + def test_get_resource_expected_value_meta_attributes(self, validator): + """ + Test _get_resource_expected_value method for meta_attributes section. + """ + expected = validator._get_resource_expected_value( + "fence_agent", "meta_attributes", "pcmk_delay_max" + ) + assert expected == "15" + + def test_create_parameter_with_none_expected_value_resource_category(self, validator): + """ + Test _create_parameter method when expected_value is None and category is + in RESOURCE_CATEGORIES. 
+ """ + param = validator._create_parameter( + category="ipaddr", name="test_param", value="test_value", subcategory="meta_attributes" + ) + assert param["category"] == "ipaddr_meta_attributes" + + def test_create_parameter_with_none_expected_value_or_empty_value(self, validator): + """ + Test _create_parameter method when expected_value is None or value is empty. + + """ + param = validator._create_parameter( + category="crm_config", name="test_param", value="test_value", expected_value=None + ) + assert param["status"] == TestStatus.INFO.value + + param = validator._create_parameter( + category="crm_config", name="test_param", value="", expected_value="expected" + ) + assert param["status"] == TestStatus.INFO.value + + def test_parse_resource_with_meta_and_instance_attributes(self, validator): + """ + Test _parse_resource method with meta_attributes and instance_attributes. + """ + xml_str = """ + + + + + + + """ + element = ET.fromstring(xml_str) + + params = validator._parse_resource(element, "sbd_stonith") + + meta_params = [p for p in params if p["category"] == "sbd_stonith_meta_attributes"] + instance_params = [p for p in params if p["category"] == "sbd_stonith_instance_attributes"] + + assert len(meta_params) >= 1 + assert len(instance_params) >= 1 + + def test_parse_basic_config(self, validator): + """ + Test _parse_basic_config method. + """ + xml_str = """ + + + """ + element = ET.fromstring(xml_str) + + params = validator._parse_basic_config(element, "crm_config", "test_subcategory") + + assert len(params) == 2 + assert params[0]["category"] == "crm_config_test_subcategory" + assert params[0]["name"] == "test_param" + assert params[0]["value"] == "test_value" + + def test_parse_constraints_with_missing_attributes(self, validator): + """ + Test _parse_constraints method with missing attributes. + """ + xml_str = """ + + """ + root = ET.fromstring(xml_str) + params = validator._parse_constraints(root) + assert isinstance(params, list) + + def test_parse_ha_cluster_config_with_empty_root(self, monkeypatch): + """ + Test parse_ha_cluster_config method when root is empty. + Covers lines 508-546. + """ + + def mock_execute_command(*args, **kwargs): + return "" + + monkeypatch.setattr( + "src.module_utils.sap_automation_qa.SapAutomationQA.execute_command_subprocess", + mock_execute_command, + ) + + validator = HAClusterValidator( + os_type=OperatingSystemFamily.SUSE, + sid="PRD", + scs_instance_number="00", + ers_instance_number="01", + fencing_mechanism="AFA", + virtual_machine_name="vmname", + constants=DUMMY_CONSTANTS, + ) + + result = validator.get_result() + assert "details" in result + + def test_get_resource_expected_value_operations_section(self, validator): + """ + Test _get_resource_expected_value method for operations section. + """ + expected = validator._get_resource_expected_value( + "fence_agent", "operations", "timeout", "monitor" + ) + assert expected == ["700", "700s"] + + def test_get_resource_expected_value_return_none(self, validator): + """ + Test _get_resource_expected_value method returns None for unknown section. + """ + expected = validator._get_resource_expected_value("fence_agent", "unknown_section", "param") + assert expected is None + + def test_create_parameter_with_list_expected_value_success(self, validator): + """ + Test _create_parameter method with list expected value - success case. 
+ """ + param = validator._create_parameter( + category="test_category", + name="test_param", + value="value1", + expected_value=["value1", "value2"], + ) + assert param["status"] == TestStatus.SUCCESS.value + assert param["expected_value"] == "value1" + + def test_create_parameter_with_list_expected_value_error(self, validator): + """ + Test _create_parameter method with list expected value - error case. + """ + param = validator._create_parameter( + category="test_category", + name="test_param", + value="value3", + expected_value=["value1", "value2"], + ) + assert param["status"] == TestStatus.ERROR.value + + def test_create_parameter_with_invalid_expected_value_type(self, validator): + """ + Test _create_parameter method with invalid expected value type. + """ + param = validator._create_parameter( + category="test_category", + name="test_param", + value="test_value", + expected_value=123, + ) + assert param["status"] == TestStatus.ERROR.value diff --git a/tests/modules/location_constraints_test.py b/tests/modules/location_constraints_test.py index 32934aae..e63eae86 100644 --- a/tests/modules/location_constraints_test.py +++ b/tests/modules/location_constraints_test.py @@ -8,6 +8,7 @@ import xml.etree.ElementTree as ET import pytest from src.modules.location_constraints import LocationConstraintsManager, main +from src.module_utils.enums import OperatingSystemFamily LC_STR = """ @@ -49,7 +50,7 @@ def location_constraints_manager(self): :return: LocationConstraintsManager instance :rtype: LocationConstraintsManager """ - return LocationConstraintsManager(ansible_os_family="SUSE") + return LocationConstraintsManager(ansible_os_family=OperatingSystemFamily.SUSE) def test_location_constraints_exists_success( self, @@ -131,7 +132,7 @@ class MockAnsibleModule: """ def __init__(self, argument_spec, supports_check_mode): - self.params = {"action": "remove", "ansible_os_family": "SUSE"} + self.params = {"action": "remove"} self.check_mode = False def exit_json(self, **kwargs): @@ -140,9 +141,23 @@ def exit_json(self, **kwargs): """ mock_result.update(kwargs) + def mock_ansible_facts(module): + """ + Mock function to return Ansible facts. + + :param module: Mock Ansible module instance. + :type module: MockAnsibleModule + :return: Dictionary with Ansible facts. 
+ :rtype: dict + """ + return {"os_family": "SUSE"} + with monkeypatch.context() as monkey_patch: monkey_patch.setattr( "src.modules.location_constraints.AnsibleModule", MockAnsibleModule ) + monkey_patch.setattr( + "src.modules.location_constraints.ansible_facts", mock_ansible_facts + ) main() assert mock_result["status"] == "INFO" diff --git a/tests/modules/log_parser_test.py b/tests/modules/log_parser_test.py index 15f984d2..c8c550da 100644 --- a/tests/modules/log_parser_test.py +++ b/tests/modules/log_parser_test.py @@ -8,6 +8,7 @@ import json import pytest from src.modules.log_parser import LogParser, PCMK_KEYWORDS, SYS_KEYWORDS, main +from src.module_utils.enums import OperatingSystemFamily class TestLogParser: @@ -27,7 +28,7 @@ def log_parser_redhat(self): start_time="2025-01-01 00:00:00", end_time="2025-01-01 23:59:59", log_file="test_log_file.log", - ansible_os_family="REDHAT", + ansible_os_family=OperatingSystemFamily.REDHAT, ) @pytest.fixture @@ -42,7 +43,7 @@ def log_parser_suse(self): start_time="2023-01-01 00:00:00", end_time="2023-01-01 23:59:59", log_file="test_log_file.log", - ansible_os_family="SUSE", + ansible_os_family=OperatingSystemFamily.SUSE, ) def test_parse_logs_success(self, mocker, log_parser_redhat): @@ -152,8 +153,20 @@ def __init__(self, argument_spec, supports_check_mode): def exit_json(self, **kwargs): mock_result.update(kwargs) + def mock_ansible_facts(module): + """ + Mock function to return Ansible facts for RedHat. + + :param module: Mock Ansible module instance. + :type module: MockAnsibleModule + :return: Dictionary with Ansible facts. + :rtype: dict + """ + return {"os_family": "RedHat"} + with monkeypatch.context() as monkey_patch: monkey_patch.setattr("src.modules.log_parser.AnsibleModule", MockAnsibleModule) + monkey_patch.setattr("src.modules.log_parser.ansible_facts", mock_ansible_facts) main() assert mock_result["status"] == "FAILED" @@ -180,6 +193,29 @@ def test_merge_logs_success(self, log_parser_redhat): assert len(filtered_logs) == len(log_parser_redhat.logs) assert result["status"] == "PASSED" + def test_merge_logs_success_suse(self, log_parser_suse): + """ + Test the merge_logs method for successful log merging. + + :param log_parser_suse: LogParser instance. + :type log_parser_suse: LogParser + """ + log_parser_suse.logs = [ + '["Jan 01 12:34:56 server1 pacemaker-controld: Notice: ' + 'Resource SAPHana_HDB_00 started"]', + '["Jan 01 12:35:00 server2 pacemaker-controld: Notice: ' + 'Resource SAPHana_HDB_01 started"]', + '["Jan 01 12:36:00 server3 pacemaker-controld: Notice: ' + 'Resource SAPHana_HDB_02 started"]', + ] + + log_parser_suse.merge_logs() + result = log_parser_suse.get_result() + + filtered_logs = [log.strip() for log in json.loads(result["filtered_logs"])] + assert len(filtered_logs) == len(log_parser_suse.logs) + assert result["status"] == "PASSED" + def test_merge_logs_empty_input(self, log_parser_redhat): """ Test the merge_logs method with empty input. @@ -215,3 +251,104 @@ def test_merge_logs_invalid_json(self, log_parser_redhat): filtered_logs = [log.strip() for log in json.loads(result["filtered_logs"])] assert len(filtered_logs) == 2 assert result["status"] == "PASSED" + + def test_merge_logs_suse_timestamp_parsing(self, log_parser_suse): + """ + Test the merge_logs method with SUSE timestamp format. 
+ """ + log_parser_suse.logs = [ + '["2023-01-01T12:34:56.123456789+01:00 server1 pacemaker-controld: Notice: Resource SAPHana_HDB_00 started"]', + '["2023-01-01T12:35:00.987654321+01:00 server2 pacemaker-controld: Notice: Resource SAPHana_HDB_01 started"]', + ] + log_parser_suse.merge_logs() + result = log_parser_suse.get_result() + filtered_logs = json.loads(result["filtered_logs"]) + assert len(filtered_logs) == 2 + assert result["status"] == "PASSED" + + def test_merge_logs_unknown_os_family(self, monkeypatch): + """ + Test the merge_logs method with unknown OS family. + """ + + def mock_execute_command(*args, **kwargs): + return "" + + monkeypatch.setattr( + "src.module_utils.sap_automation_qa.SapAutomationQA.execute_command_subprocess", + mock_execute_command, + ) + log_parser_unknown = LogParser( + start_time="2023-01-01 00:00:00", + end_time="2023-01-01 23:59:59", + log_file="test_log_file.log", + ansible_os_family=OperatingSystemFamily.DEBIAN, + ) + + log_parser_unknown.logs = [ + '["Jan 01 12:34:56 server1 pacemaker-controld: Notice: Resource SAPHana_HDB_00 started"]', + ] + + log_parser_unknown.merge_logs() + result = log_parser_unknown.get_result() + + filtered_logs = json.loads(result["filtered_logs"]) + assert len(filtered_logs) == 1 + assert result["status"] == "PASSED" + + def test_parse_logs_suse_timestamp_format(self, mocker, log_parser_suse): + """ + Test the parse_logs method with SUSE timestamp format. + """ + mocker.patch( + "builtins.open", + mocker.mock_open( + read_data="""2023-01-01T12:34:56.123456789+01:00 nodename SAPHana: SAP HANA action +2023-01-01T12:35:00.987654321+01:00 nodename pacemaker-controld: Pacemaker action""" + ), + ) + + log_parser_suse.parse_logs() + result = log_parser_suse.get_result() + + filtered_logs = json.loads(result["filtered_logs"]) + assert len(filtered_logs) == 2 + assert result["status"] == "PASSED" + + def test_run_module_merge_logs_function(self, monkeypatch): + """ + Test the run_module function with merge_logs function parameter. + """ + mock_result = {} + + class MockAnsibleModule: + """ + Mock AnsibleModule for testing merge_logs function. + """ + + def __init__(self, argument_spec, supports_check_mode): + self.params = { + "start_time": "2023-01-01 00:00:00", + "end_time": "2023-01-01 23:59:59", + "log_file": "test_log_file.log", + "function": "merge_logs", + "logs": ['["Jan 01 12:34:56 server1 test log"]'], + } + self.check_mode = False + + def exit_json(self, **kwargs): + mock_result.update(kwargs) + + def mock_ansible_facts(module): + """ + Mock function to return Ansible facts. 
+ """ + return {"os_family": "RedHat"} + + with monkeypatch.context() as monkey_patch: + monkey_patch.setattr("src.modules.log_parser.AnsibleModule", MockAnsibleModule) + monkey_patch.setattr("src.modules.log_parser.ansible_facts", mock_ansible_facts) + from src.modules.log_parser import run_module + + run_module() + assert mock_result["status"] == "PASSED" diff --git a/tests/roles/ha_db_hana/block_network_test.py b/tests/roles/ha_db_hana/block_network_test.py index 6ec64a81..021854ec 100644 --- a/tests/roles/ha_db_hana/block_network_test.py +++ b/tests/roles/ha_db_hana/block_network_test.py @@ -51,6 +51,7 @@ def test_environment(self, ansible_inventory): "bin/nc", "bin/echo", "bin/sleep", + "bin/SAPHanaSR-manageProvider", ] temp_dir = self.setup_test_environment( @@ -101,8 +102,8 @@ def test_functional_db_primary_node_success(self, test_environment, ansible_inve assert result.rc == 0, ( f"Playbook failed with status: {result.rc}\n" - f"STDOUT: {result.stdout.read() if result.stdout else 'No output'}\n" - f"STDERR: {result.stderr.read() if result.stderr else 'No errors'}\n" + f"STDOUT: {result.stdout if hasattr(result, 'stdout') else 'No output'}\n" + f"STDERR: {result.stderr if hasattr(result, 'stderr') else 'No errors'}\n" f"Events: {[e.get('event') for e in result.events if 'event' in e]}" ) diff --git a/tests/roles/ha_db_hana/ha_config_test.py b/tests/roles/ha_db_hana/ha_config_test.py index 16944dd6..e6be97df 100644 --- a/tests/roles/ha_db_hana/ha_config_test.py +++ b/tests/roles/ha_db_hana/ha_config_test.py @@ -59,6 +59,7 @@ def test_environment(self, ansible_inventory): "project/library/get_package_list", "bin/crm_resource", "bin/crm", + "bin/SAPHanaSR-manageProvider", ], extra_vars_override={"node_tier": "hana"}, ) diff --git a/tests/roles/ha_db_hana/primary_node_ops_test.py b/tests/roles/ha_db_hana/primary_node_ops_test.py index 166a00ba..ec441c7d 100644 --- a/tests/roles/ha_db_hana/primary_node_ops_test.py +++ b/tests/roles/ha_db_hana/primary_node_ops_test.py @@ -113,6 +113,7 @@ def test_environment(self, ansible_inventory, task_type): "bin/crm", "bin/echo", "bin/killall", + "bin/SAPHanaSR-manageProvider", ] if task_type["task_name"] == "sbd-fencing": diff --git a/tests/roles/ha_db_hana/resource_migration_test.py b/tests/roles/ha_db_hana/resource_migration_test.py index a5f65e79..a9a34493 100644 --- a/tests/roles/ha_db_hana/resource_migration_test.py +++ b/tests/roles/ha_db_hana/resource_migration_test.py @@ -60,6 +60,10 @@ def test_environment(self, ansible_inventory): "name": "get_hana_resource_id", "SUSE": "cibadmin --query --scope resources", }, + { + "name": "get_hana_resource_id_saphanasr_angi", + "SUSE": "cibadmin --query --scope resources", + }, ] temp_dir = self.setup_test_environment( @@ -75,6 +79,7 @@ def test_environment(self, ansible_inventory): "bin/cibadmin", "bin/crm_resource", "bin/crm", + "bin/SAPHanaSR-manageProvider", ], extra_vars_override={"commands": commands, "node_tier": "hana"}, ) diff --git a/tests/roles/ha_db_hana/secondary_node_ops_test.py b/tests/roles/ha_db_hana/secondary_node_ops_test.py index 82a8924d..2ffe698f 100644 --- a/tests/roles/ha_db_hana/secondary_node_ops_test.py +++ b/tests/roles/ha_db_hana/secondary_node_ops_test.py @@ -87,6 +87,7 @@ def test_environment(self, ansible_inventory, task_type): "bin/crm_resource", "bin/echo", "bin/killall", + "bin/SAPHanaSR-manageProvider", ], extra_vars_override={"node_tier": "hana"}, ) diff --git a/tests/roles/mock_data/SAPHanaSR-manageProvider.txt b/tests/roles/mock_data/SAPHanaSR-manageProvider.txt new 
file mode 100644 index 00000000..81a3b395 --- /dev/null +++ b/tests/roles/mock_data/SAPHanaSR-manageProvider.txt @@ -0,0 +1,5 @@ +#!/bin/bash + +echo "[ha_dr_provider_saphanasr]" + +exit 0 \ No newline at end of file diff --git a/tests/roles/mock_data/check_indexserver.txt b/tests/roles/mock_data/check_indexserver.txt index 2bc840a7..b939acd1 100644 --- a/tests/roles/mock_data/check_indexserver.txt +++ b/tests/roles/mock_data/check_indexserver.txt @@ -7,7 +7,6 @@ def main(): module = AnsibleModule( argument_spec=dict( database_sid=dict(type="str", required=True), - ansible_os_family=dict(type="str", required=True), ) ) diff --git a/tests/roles/mock_data/get_cluster_status_db.txt b/tests/roles/mock_data/get_cluster_status_db.txt index b8478690..2658cf76 100644 --- a/tests/roles/mock_data/get_cluster_status_db.txt +++ b/tests/roles/mock_data/get_cluster_status_db.txt @@ -9,7 +9,8 @@ def main(): argument_spec=dict( operation_step=dict(type="str", required=True), database_sid=dict(type="str", required=True), - ansible_os_family=dict(type="str", required=False), + saphanasr_provider=dict(type="str", required=True), + db_instance_number=dict(type="str", required=True), ) ) diff --git a/tests/roles/mock_data/get_cluster_status_scs.txt b/tests/roles/mock_data/get_cluster_status_scs.txt index a868a4ab..4d8625e5 100644 --- a/tests/roles/mock_data/get_cluster_status_scs.txt +++ b/tests/roles/mock_data/get_cluster_status_scs.txt @@ -9,7 +9,6 @@ def main(): module = AnsibleModule( argument_spec=dict( sap_sid=dict(type='str', required=True), - ansible_os_family=dict(type='str', required=True), ) ) diff --git a/tests/roles/mock_data/get_pcmk_properties_db.txt b/tests/roles/mock_data/get_pcmk_properties_db.txt index cefe2503..856c8ad3 100644 --- a/tests/roles/mock_data/get_pcmk_properties_db.txt +++ b/tests/roles/mock_data/get_pcmk_properties_db.txt @@ -6,11 +6,11 @@ def main(): argument_spec=dict( sid=dict(type="str"), instance_number=dict(type="str"), - ansible_os_family=dict(type="str"), virtual_machine_name=dict(type="str"), fencing_mechanism=dict(type="str"), os_version=dict(type="str"), pcmk_constants=dict(type="dict"), + saphanasr_provider=dict(type="str", required=True) ) ) diff --git a/tests/roles/mock_data/get_pcmk_properties_scs.txt b/tests/roles/mock_data/get_pcmk_properties_scs.txt index 57927b71..a72ae8f9 100644 --- a/tests/roles/mock_data/get_pcmk_properties_scs.txt +++ b/tests/roles/mock_data/get_pcmk_properties_scs.txt @@ -7,7 +7,6 @@ def main(): sid=dict(type='str', required=True), ascs_instance_number=dict(type='str', required=True), ers_instance_number=dict(type='str', required=True), - ansible_os_family=dict(type='str', required=True), virtual_machine_name=dict(type='str', required=True), pcmk_constants=dict(type='dict', required=True), fencing_mechanism=dict(type='str', required=True), diff --git a/tests/roles/mock_data/location_constraints.txt b/tests/roles/mock_data/location_constraints.txt index cc44e972..49818ac7 100644 --- a/tests/roles/mock_data/location_constraints.txt +++ b/tests/roles/mock_data/location_constraints.txt @@ -7,13 +7,11 @@ def main(): module = AnsibleModule( argument_spec=dict( action=dict(type="str", required=True), - ansible_os_family=dict(type="str", required=True), ), supports_check_mode=True ) action = module.params["action"] - ansible_os_family = module.params["ansible_os_family"] diff --git a/tests/roles/mock_data/log_parser.txt b/tests/roles/mock_data/log_parser.txt index 762bd3bb..4990f81f 100644 --- a/tests/roles/mock_data/log_parser.txt +++ 
b/tests/roles/mock_data/log_parser.txt @@ -11,7 +11,6 @@ def main(): end_time=dict(type='str', required=False), log_file=dict(type='str', required=False, default='/var/log/messages'), keywords=dict(type='list', required=False, default=[]), - ansible_os_family=dict(type='str', required=True), function=dict(type='str', required=True), logs=dict(type='list', required=False) ) diff --git a/tests/roles/mock_data/secondary_get_cluster_status_db.txt b/tests/roles/mock_data/secondary_get_cluster_status_db.txt index 5eed9c9d..afbec0a7 100644 --- a/tests/roles/mock_data/secondary_get_cluster_status_db.txt +++ b/tests/roles/mock_data/secondary_get_cluster_status_db.txt @@ -9,7 +9,8 @@ def main(): argument_spec=dict( operation_step=dict(type="str", required=True), database_sid=dict(type="str", required=True), - ansible_os_family=dict(type="str", required=False), + saphanasr_provider=dict(type="str", required=True), + db_instance_number=dict(type="str", required=True) ) ) diff --git a/tests/roles/roles_testing_base.py b/tests/roles/roles_testing_base.py index d9f43c8d..11534f5c 100644 --- a/tests/roles/roles_testing_base.py +++ b/tests/roles/roles_testing_base.py @@ -162,6 +162,7 @@ def setup_test_environment( "misc/tasks/var-log-messages.yml", "misc/tasks/post-telemetry-data.yml", "misc/tasks/loadbalancer.yml", + "misc/tasks/get-saphanasr-provider.yml", ] task_file = f"{role_type}/tasks/{task_name}.yml" From 8f50caa78a126eec0a8777be8f8f50ea840ecfcb Mon Sep 17 00:00:00 2001 From: Devansh Jain <86314060+devanshjainms@users.noreply.github.com> Date: Mon, 30 Jun 2025 13:11:03 -0700 Subject: [PATCH 2/6] Refactor resource migration tasks for improved clarity and organization (#91) --- src/roles/ha_db_hana/tasks/resource-migration.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/roles/ha_db_hana/tasks/resource-migration.yml b/src/roles/ha_db_hana/tasks/resource-migration.yml index 0c609767..d76f1248 100644 --- a/src/roles/ha_db_hana/tasks/resource-migration.yml +++ b/src/roles/ha_db_hana/tasks/resource-migration.yml @@ -32,9 +32,9 @@ test_execution_hostname: "{{ hostvars[cluster_status_pre.primary_node].ansible_hostname }}" - name: "Test Execution: Get HANA resource id for saphanasr_angi" + when: saphanasr_provider | default('SAPHanaSR') == "SAPHanaSR-angi" block: - name: "Test Execution: Get HANA resource id for saphanasr_angi" - when: saphanasr_provider | default('SAPHanaSR') == "SAPHanaSR-angi" ansible.builtin.shell: >- set -o pipefail && {{ commands | selectattr('name','equalto','get_hana_resource_id_saphanasr_angi') From a0b16b7c6a7e8f4e9f8f8a2ee74779466779fe58 Mon Sep 17 00:00:00 2001 From: Devansh Jain <86314060+devanshjainms@users.noreply.github.com> Date: Tue, 8 Jul 2025 12:01:55 -0700 Subject: [PATCH 3/6] Enhancement for offline validation of HA configuration and Support for SUSE and REDHAT as management server --- docs/HIGH_AVAILABILITY.md | 40 +- docs/OFFLINE_VALIDATION.md | 88 +++ requirements.txt | 30 +- scripts/sap_automation_qa.sh | 290 ++++---- scripts/setup.sh | 67 +- scripts/utils.sh | 235 ++++++ src/module_utils/get_pcmk_properties.py | 542 ++++++++++++++ src/module_utils/sap_automation_qa.py | 6 +- src/modules/get_pcmk_properties_db.py | 548 +++----------- src/modules/get_pcmk_properties_scs.py | 472 +++--------- src/modules/render_html_report.py | 6 +- src/playbook_00_ha_db_functional_tests.yml | 2 +- src/playbook_00_ha_scs_functional_tests.yml | 2 +- src/playbook_01_ha_offline_tests.yml | 58 ++ .../ha_db_hana/tasks/ha-config-offline.yml | 46 ++ 
 src/roles/ha_db_hana/tasks/ha-config.yml      |  11 +-
 src/roles/ha_scs/tasks/ha-config-offline.yml  |  56 ++
 src/roles/misc/tasks/offline-validation.yml   |  84 +++
 src/roles/misc/tasks/post-telemetry-data.yml  |   2 +-
 src/roles/misc/tasks/render-html-report.yml   |   2 +-
 src/vars/input-api.yaml                       |  18 +
 tests/module_utils/get_cluster_status_test.py |   3 +-
 .../module_utils/get_pcmk_properties_test.py  | 464 ++++++++++++
 tests/modules/get_pcmk_properties_db_test.py  | 693 ++++++++++--------
 tests/modules/get_pcmk_properties_scs_test.py | 655 +++++++++--------
 25 files changed, 2795 insertions(+), 1625 deletions(-)
 create mode 100644 docs/OFFLINE_VALIDATION.md
 create mode 100644 scripts/utils.sh
 create mode 100644 src/module_utils/get_pcmk_properties.py
 create mode 100644 src/playbook_01_ha_offline_tests.yml
 create mode 100644 src/roles/ha_db_hana/tasks/ha-config-offline.yml
 create mode 100644 src/roles/ha_scs/tasks/ha-config-offline.yml
 create mode 100644 src/roles/misc/tasks/offline-validation.yml
 create mode 100644 tests/module_utils/get_pcmk_properties_test.py

diff --git a/docs/HIGH_AVAILABILITY.md b/docs/HIGH_AVAILABILITY.md
index d0f25736..33c5309e 100644
--- a/docs/HIGH_AVAILABILITY.md
+++ b/docs/HIGH_AVAILABILITY.md
@@ -52,11 +52,8 @@ pcs cluster enable --all # for RedHat virtual machine
 
 The SAP Testing Automation Framework requires a jumpbox or management server with the following setup:
 
-- **Operating System**: Ubuntu 22.04 LTS.
+- **Operating System**: Ubuntu 22.04 LTS, SLES 15 SP4, or SLES 15 SP6.
 - **Location**: Must be deployed on Azure.
-
-> [!NOTE]
-> Currently, only Ubuntu 22.04 LTS is supported for running the SAP Testing Automation Framework.
 
 ### Azure RBAC
 
@@ -91,13 +88,29 @@ The management server must have network connectivity to the SAP system to perfor
 
 ### 1. Environment Setup
 
-To set up your enviroment in management server, follow these steps:
+To set up your environment on the management server, follow these steps:
 
-1.1. **Login to the Ubuntu management server**:
+1.1. **Log in to the management server**:
 
-Ensure you are logged into the Ubuntu management server that is connected to the SAP system's virtual network.
+Ensure you are logged into the management server that is connected to the SAP system's virtual network.
 
-1.2. **Fork and clone the repository**:
+1.2. **Install git on the management server**:
+
+```bash
+# Debian/Ubuntu
+sudo su -
+apt-get install git
+
+# RHEL/CentOS
+sudo su -
+yum install git
+
+# SUSE
+sudo su -
+zypper install git
+```
+
+1.3. **Fork and clone the repository**:
 
 ```bash
 # sudo to root
@@ -111,7 +124,7 @@ git clone https://github.com/GITHUB-USERNAME/sap-automation-qa.git
 cd sap-automation-qa
 ```
 
-1.3. **Run the initial setup script**:
+1.4. **Run the initial setup script**:
 
 ```bash
 ./scripts/setup.sh
@@ -316,14 +329,11 @@ To execute the script, run following command:
 # Run specific test cases from HA_DB_HANA group
 ./scripts/sap_automation_qa.sh --test_groups=HA_DB_HANA --test_cases=[ha-config,primary-node-crash]
 
-# Run all enabled tests in HA_DB_HANA group
-./scripts/sap_automation_qa.sh --test_groups=HA_DB_HANA
-
-# Run all enabled tests in HA_SCS group
-./scripts/sap_automation_qa.sh --test_groups=HA_SCS
+# Run specific test cases from HA_SCS group
+./scripts/sap_automation_qa.sh --test_groups=HA_SCS --test_cases=[ha-config]
 
 # Run with verbose output
-./scripts/sap_automation_qa.sh --test_groups=HA_DB_HANA --test_cases=[ha-config] -vv
+./scripts/sap_automation_qa.sh --test_groups=HA_DB_HANA --test_cases=[primary-node-crash] -vvv
 ```
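+
+A minimal sketch of the idea behind `--test_groups`/`--test_cases` filtering, assuming a simplified `test_groups` layout (the real schema lives in `src/vars/input-api.yaml`, and the framework's filtering is implemented in `src/module_utils/filter_tests.py` together with this script):
+
+```python
+# Hypothetical, simplified shape of the test-group configuration in vars.yaml.
+config = {
+    "test_groups": [
+        {"name": "HA_DB_HANA", "test_cases": ["ha-config", "primary-node-crash"]},
+        {"name": "HA_SCS", "test_cases": ["ha-config", "ascs-migration"]},
+    ]
+}
+
+def filter_tests(config, group, cases):
+    """Keep only the requested group and, within it, the requested test cases."""
+    return [
+        {"name": g["name"], "test_cases": [c for c in g["test_cases"] if c in cases]}
+        for g in config["test_groups"]
+        if g["name"] == group
+    ]
+
+print(filter_tests(config, "HA_DB_HANA", ["ha-config"]))
+```
+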
### 4. Viewing Test Results

diff --git a/docs/OFFLINE_VALIDATION.md b/docs/OFFLINE_VALIDATION.md
new file mode 100644
index 00000000..c5c701f3
--- /dev/null
+++ b/docs/OFFLINE_VALIDATION.md
@@ -0,0 +1,88 @@
+# SAP Automation QA - Offline Validation
+
+## Overview
+
+The offline validation feature enables robust validation of SAP HANA and SAP Central Services High Availability cluster configurations without requiring live cluster access or connecting to the SAP virtual machines. It analyzes cluster configurations from previously collected CIB (Cluster Information Base) XML files, making it ideal for post-incident analysis, compliance auditing, and troubleshooting scenarios, and it lets you maintain and audit cluster configurations without impacting production systems.
+
+## How Offline Validation Works
+
+### Architecture Overview
+
+```
+┌─────────────────┐    ┌──────────────────┐    ┌─────────────────┐
+│     CIB XML     │    │    Validation    │    │   HTML Report   │
+│     Output      │───▶│      Engine      │───▶│   Generation    │
+│   (In files)    │    │                  │    │  (with Tables)  │
+│                 │    │                  │    │                 │
+└─────────────────┘    └──────────────────┘    └─────────────────┘
+```
+
+### Prerequisites
+
+- SAP Testing Automation Framework (STAF) setup on a management server. Detailed setup instructions can be found in the [STAF Setup Guide](./HIGH_AVAILABILITY.md).
+- Previously collected CIB XML files stored in the `WORKSPACES/SYSTEM//offline_validation/` directory.
+
+### Required Files Structure
+
+```file
+WORKSPACES/SYSTEM//
+├── hosts.yaml                # Ansible inventory
+├── sap-parameters.yaml       # SAP system parameters
+└── offline_validation/       # Output of commands for offline validation
+    ├── /
+    │   └── cib               # CIB XML file for node 1
+    └── /
+        └── cib               # CIB XML file for node 2
+```
+
+## How to Perform Offline Validation
+
+### Step 1: Initial Setup
+
+This setup is defined in the Getting Started section of the [High Availability Guide](./HIGH_AVAILABILITY.md). Ensure you have the following:
+
+- Ansible inventory file (`hosts.yaml`) with the SAP system configuration.
+- SAP system parameters file (`sap-parameters.yaml`).
+- An updated `vars.yaml` file with the necessary parameters.
+
+### Step 2: Collect CIB XML Files and Copy Them to the Management Server
+
+#### 2.1 Collect CIB XML Files
+
+  Before performing offline validation, collect the High Availability cluster configuration files (CIB XML files) from the SAP system nodes by executing the following command on each node:
+
+  ```bash
+  cibadmin --query | tee cib
+  ```
+
+  This command creates a file named `cib` in the current directory, containing the cluster configuration in XML format.
+
+#### 2.2 Create the Required Directory Structure
+
+  Copy these files to the management server under the `WORKSPACES/SYSTEM//offline_validation/` directory, maintaining the structure shown above. Create the directory structure as follows:
+
+  ```bash
+  mkdir -p WORKSPACES/SYSTEM//offline_validation//
+  ```
+
+  Place the `cib` file in the respective `/` directory.
+
+### Step 3: Run Offline Validation
+
+  Execute the sap_automation_qa script for offline validation with the `--offline` flag. The target OS family (`target_os_family`) is a required parameter and must be specified using the `--extra-vars` option; the exact invocations follow the sketch below.
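+
+  A minimal sketch of the engine's offline path, for orientation only: the real logic lives in `src/module_utils/get_pcmk_properties.py` (`BaseHAClusterValidator._get_scope_from_cib` and `_parse_basic_config`), and the property name and expected value below are hypothetical examples, not the framework's defaults table.
+
+  ```python
+  import xml.etree.ElementTree as ET
+
+  # Load a previously collected CIB dump (the output of `cibadmin --query | tee cib`).
+  root = ET.parse("cib").getroot()
+
+  # Extract one scope from the full dump, as _get_scope_from_cib does.
+  crm_config = root.find(".//crm_config")
+
+  # Walk the nvpair elements, as _parse_basic_config does, comparing each value
+  # against an expected default ("stonith-enabled" is only an illustration).
+  expected = {"stonith-enabled": "true"}
+  if crm_config is not None:
+      for nvpair in crm_config.findall(".//nvpair"):
+          name, value = nvpair.get("name", ""), nvpair.get("value", "")
+          if name in expected:
+              status = "PASSED" if value == expected[name] else "FAILED"
+              print(f"{name}: {value} (expected {expected[name]}) -> {status}")
+  ```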
+ + ```bash + ./scripts/sap_automation_qa.sh --offline --extra-vars='target_os_family=SUSE' + # or + ./scripts/sap_automation_qa.sh --offline --extra-vars='target_os_family=RHEL' + ``` + + Enable verbose logging for troubleshooting: + ```bash + ./scripts/sap_automation_qa.sh --extra-vars='target_os_family=' --offline -vvv + ``` + +### Step 4: View Results + + The validation results will be available in `WORKSPACES/SYSTEM//quality_assurance/` directory. Open the HTML file in a web browser to view the detailed parameter validation table with PASSED/INFO/FAILED statuses. \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index 8220ba41..39c52772 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,14 +4,14 @@ # # pip-compile requirements.in # -ansible-compat==25.5.0 +ansible-compat==25.6.0 # via ansible-lint ansible-core==2.17.12 # via # -r requirements.in # ansible-compat # ansible-lint -ansible-lint==25.5.0 +ansible-lint==25.6.1 # via -r requirements.in ansible-runner==2.4.1 # via -r requirements.in @@ -34,11 +34,11 @@ azure-identity==1.23.0 # via # -r requirements.in # azure-kusto-data -azure-kusto-data==5.0.3 +azure-kusto-data==5.0.4 # via # -r requirements.in # azure-kusto-ingest -azure-kusto-ingest==5.0.3 +azure-kusto-ingest==5.0.4 # via -r requirements.in azure-mgmt-core==1.5.0 # via azure-mgmt-network @@ -56,9 +56,9 @@ black==25.1.0 # via # -r requirements.in # ansible-lint -bracex==2.5.post1 +bracex==2.6 # via wcmatch -certifi==2025.4.26 +certifi==2025.6.15 # via requests cffi==1.17.1 # via cryptography @@ -68,7 +68,7 @@ click==8.2.1 # via # -r requirements.in # black -coverage[toml]==7.9.0 +coverage[toml]==7.9.1 # via # -r requirements.in # pytest-cov @@ -158,12 +158,14 @@ platformdirs==4.3.8 # black # pylint pluggy==1.6.0 - # via pytest + # via + # pytest + # pytest-cov ptyprocess==0.7.0 # via pexpect pycparser==2.22 # via cffi -pygments==2.19.1 +pygments==2.19.2 # via # pytest # rich @@ -173,12 +175,12 @@ pyjwt[crypto]==2.10.1 # pyjwt pylint==3.3.7 # via -r requirements.in -pytest==8.4.0 +pytest==8.4.1 # via # -r requirements.in # pytest-cov # pytest-mock -pytest-cov==6.2.0 +pytest-cov==6.2.1 # via -r requirements.in pytest-mock==3.14.1 # via -r requirements.in @@ -213,7 +215,7 @@ resolvelib==1.0.1 # via ansible-core rich==14.0.0 # via -r requirements.in -rpds-py==0.25.1 +rpds-py==0.26.0 # via # jsonschema # referencing @@ -253,9 +255,9 @@ typing-extensions==4.14.0 # rich tzdata==2025.2 # via pandas -urllib3==2.4.0 +urllib3==2.5.0 # via requests -wcmatch==10.0 +wcmatch==10.1 # via ansible-lint yamllint==1.37.1 # via ansible-lint diff --git a/scripts/sap_automation_qa.sh b/scripts/sap_automation_qa.sh index e8fb538d..7f0aa08c 100755 --- a/scripts/sap_automation_qa.sh +++ b/scripts/sap_automation_qa.sh @@ -3,21 +3,38 @@ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. -set -euo pipefail -# Activate the virtual environment -source "$(realpath $(dirname $(realpath $0))/..)/.venv/bin/activate" +set -eo pipefail + +# Get script directory in a more portable way +script_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +project_root="$(cd "$script_dir/.." && pwd)" -cmd_dir="$(dirname "$(readlink -e "${BASH_SOURCE[0]}")")" +# Activate the virtual environment +if [[ -f "$project_root/.venv/bin/activate" ]]; then + source "$project_root/.venv/bin/activate" +else + echo "ERROR: Virtual environment not found at $project_root/.venv" + echo "Please run setup.sh first to create the virtual environment." 
+ exit 1 +fi + +# Source the utils script for logging and utility functions +source "$script_dir/utils.sh" + +# Use more portable command directory detection +if command -v readlink >/dev/null 2>&1; then + cmd_dir="$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")" +else + # Fallback for systems without readlink -f (like some macOS versions) + cmd_dir="$script_dir" +fi # Set the environment variables export ANSIBLE_COLLECTIONS_PATH=/opt/ansible/collections:${ANSIBLE_COLLECTIONS_PATH:+${ANSIBLE_COLLECTIONS_PATH}} export ANSIBLE_CONFIG="${cmd_dir}/../src/ansible.cfg" export ANSIBLE_MODULE_UTILS="${cmd_dir}/../src/module_utils:${ANSIBLE_MODULE_UTILS:+${ANSIBLE_MODULE_UTILS}}" export ANSIBLE_HOST_KEY_CHECKING=False -# Colors for error messages -RED='\033[0;31m' -GREEN='\033[0;32m' -NC='\033[0m' +set_output_context # Global variable to store the path of the temporary file. temp_file="" @@ -26,6 +43,10 @@ temp_file="" # Sets global ANSIBLE_VERBOSE variable parse_arguments() { ANSIBLE_VERBOSE="" + OFFLINE_MODE="" + TEST_GROUPS="" + TEST_CASES="" + EXTRA_VARS="" for arg in "$@"; do case "$arg" in @@ -37,13 +58,15 @@ parse_arguments() { ;; --test_cases=*) TEST_CASES="${arg#*=}" - # Remove brackets and convert to array TEST_CASES="${TEST_CASES#[}" TEST_CASES="${TEST_CASES%]}" ;; - --extra-vars=*) - EXTRA_VARS="${arg#*=}" - ;; + --extra-vars=*) + EXTRA_VARS="${arg#*=}" + ;; + --offline) + OFFLINE_MODE="true" + ;; -h|--help) show_usage exit 0 @@ -60,33 +83,21 @@ Options: -v, -vv, -vvv, etc. Set Ansible verbosity level --test_groups=GROUP Specify test group to run (e.g., HA_DB_HANA, HA_SCS) --test_cases=[case1,case2] Specify specific test cases to run (comma-separated, in brackets) - --extra-vars=VAR Specify additional Ansible extra variables (e.g., --extra-vars='{"key":"value"}') + --extra-vars=VAR Specify additional Ansible extra variables (e.g., --extra-vars='{"key":"value"}') + --offline Run offline test cases using previously collected CIB data -h, --help Show this help message Examples: $0 --test_groups=HA_DB_HANA --test_cases=[ha-config,primary-node-crash] $0 --test_groups=HA_SCS - $0 --test_groups=HA_DB_HANA --test_cases=[ha-config,primary-node-crash] -vv - $0 --test_groups=HA_DB_HANA --test_cases=[ha-config,primary-node-crash] --extra-vars='{"key":"value"}' + $0 --test_groups=HA_DB_HANA --test_cases=[ha-config,primary-node-crash] -vv + $0 --test_groups=HA_DB_HANA --test_cases=[ha-config,primary-node-crash] --extra-vars='{"key":"value"}' + $0 --test_groups=HA_DB_HANA --test_cases=[ha-config] --offline Configuration is read from vars.yaml file. EOF } -# Print logs with color based on severity. -# :param severity: The severity level of the log (e.g., "INFO", "ERROR"). -# :param message: The message to log. -log() { - local severity=$1 - local message=$2 - - if [[ "$severity" == "ERROR" ]]; then - echo -e "${RED}[ERROR] $message${NC}" - else - echo -e "${GREEN}[INFO] $message${NC}" - fi -} - log "INFO" "ANSIBLE_COLLECTIONS_PATH: $ANSIBLE_COLLECTIONS_PATH" log "INFO" "ANSIBLE_CONFIG: $ANSIBLE_CONFIG" log "INFO" "ANSIBLE_MODULE_UTILS: $ANSIBLE_MODULE_UTILS" @@ -94,13 +105,6 @@ log "INFO" "ANSIBLE_MODULE_UTILS: $ANSIBLE_MODULE_UTILS" # Define the path to the vars.yaml file VARS_FILE="${cmd_dir}/../vars.yaml" -# Check if a command exists. -# :param command: The command to check. -# :return: None. Exits with a non-zero status if the command does not exist. -command_exists() { - command -v "$1" &> /dev/null -} - # Validate input parameters from vars.yaml. # :return: None. 
Exits with a non-zero status if validation fails. validate_params() { @@ -131,20 +135,6 @@ validate_params() { fi } -# Check if a file exists. -# :param file_path: The path to the file to check. -# :param error_message: The error message to display if the file does not exist. -# :return: None. Exits with a non-zero status if the file does not exist. -check_file_exists() { - local file_path=$1 - local error_message=$2 - log "INFO" "Checking if file exists: $file_path" - if [[ ! -f "$file_path" ]]; then - log "ERROR" "Error: $error_message" - exit 1 - fi -} - # Extract the error message from a command's output. # :param error_output: The output containing the error message. # :return: The extracted error message or a default message if none is found. @@ -161,16 +151,26 @@ extract_error_message() { # Determine the playbook name based on the sap_functional_test_type. # :param test_type: The type of SAP functional test. +# :param offline_mode: Whether to use offline mode (optional). # :return: The name of the playbook. get_playbook_name() { local test_type=$1 + local offline_mode=${2:-""} case "$test_type" in "DatabaseHighAvailability") - echo "playbook_00_ha_db_functional_tests" + if [[ "$offline_mode" == "true" ]]; then + echo "playbook_01_ha_offline_tests" + else + echo "playbook_00_ha_db_functional_tests" + fi ;; "CentralServicesHighAvailability") - echo "playbook_00_ha_scs_functional_tests" + if [[ "$offline_mode" == "true" ]]; then + echo "playbook_01_ha_offline_tests" + else + echo "playbook_00_ha_scs_functional_tests" + fi ;; *) log "ERROR" "Unknown sap_functional_test_type: $test_type" @@ -296,7 +296,6 @@ run_ansible_playbook() { local auth_type=$4 local system_config_folder=$5 - local extra_vars="" if [[ -n "$TEST_GROUPS" || -n "$TEST_CASES" ]]; then local filtered_config @@ -306,99 +305,106 @@ run_ansible_playbook() { fi fi - if [[ -n "$EXTRA_VARS" ]]; then - log a "INFO" "Using additional extra vars: $EXTRA_VARS" - escaped_extra_vars="${EXTRA_VARS//\'/\'\"\'\"\'}" - extra_vars+=" --extra-vars '$escaped_extra_vars'" - fi - - # Set local secret_id and key_vault_id if defined - local secret_id=$(grep "^secret_id:" "$system_params" | awk '{split($0,a,": "); print a[2]}' | xargs || true) - local key_vault_id=$(grep "^key_vault_id:" "$system_params" | awk '{split($0,a,": "); print a[2]}' | xargs || true) - - if [[ -n "$secret_id" ]]; then - log "INFO" "Extracted secret_id: $secret_id" + if [[ -n "$EXTRA_VARS" ]]; then + log "INFO" "Using additional extra vars: $EXTRA_VARS" + escaped_extra_vars="${EXTRA_VARS//\'/\'\"\'\"\'}" + extra_vars+=" --extra-vars '$escaped_extra_vars'" fi - if [[ -n "$key_vault_id" ]]; then - log "INFO" "Extracted key_vault_id: $key_vault_id" - fi + # Skip authentication setup if in offline mode + if [[ "$OFFLINE_MODE" == "true" ]]; then + log "INFO" "Offline mode: Skipping SSH authentication setup" + command="ansible-playbook ${cmd_dir}/../src/$playbook_name.yml -i $system_hosts \ + -e @$VARS_FILE -e @$system_params -e '_workspace_directory=$system_config_folder' $extra_vars --connection=local" + else + # Set local secret_id and key_vault_id if defined + local secret_id=$(grep "^secret_id:" "$system_params" | awk '{split($0,a,": "); print a[2]}' | xargs || true) + local key_vault_id=$(grep "^key_vault_id:" "$system_params" | awk '{split($0,a,": "); print a[2]}' | xargs || true) - if [[ "$auth_type" == "SSHKEY" ]]; then - log "INFO" "Authentication type is SSHKEY." 
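+        # For online runs, credentials come either from Azure Key Vault
+        # (key_vault_id/secret_id in sap-parameters.yaml) or from local files
+        # under WORKSPACES/SYSTEM/$SYSTEM_CONFIG_NAME; log any Key Vault IDs found.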
+ if [[ -n "$secret_id" ]]; then + log "INFO" "Extracted secret_id: $secret_id" + fi - if [[ -n "$key_vault_id" && -n "$secret_id" ]]; then - log "INFO" "Key Vault ID and Secret ID are set. Retrieving SSH key from Key Vault." - retrieve_secret_from_key_vault "$key_vault_id" "$secret_id" "SSHKEY" + if [[ -n "$key_vault_id" ]]; then + log "INFO" "Extracted key_vault_id: $key_vault_id" + fi - check_file_exists "$temp_file" \ - "Temporary SSH key file not found. Please check the Key Vault secret ID." - command="ansible-playbook ${cmd_dir}/../src/$playbook_name.yml -i $system_hosts --private-key $temp_file \ - -e @$VARS_FILE -e @$system_params -e '_workspace_directory=$system_config_folder' $extra_vars" - else - local ssh_key_dir="${cmd_dir}/../WORKSPACES/SYSTEM/$SYSTEM_CONFIG_NAME" - local ssh_key="" - local extensions=("ppk" "pem" "key" "private" "rsa" "ed25519" "ecdsa" "dsa" "") - - for ext in "${extensions[@]}"; do - if [[ -n "$ext" ]]; then - local key_file="${ssh_key_dir}/ssh_key.${ext}" - else - local key_file="${ssh_key_dir}/ssh_key" + if [[ "$auth_type" == "SSHKEY" ]]; then + log "INFO" "Authentication type is SSHKEY." + + if [[ -n "$key_vault_id" && -n "$secret_id" ]]; then + log "INFO" "Key Vault ID and Secret ID are set. Retrieving SSH key from Key Vault." + retrieve_secret_from_key_vault "$key_vault_id" "$secret_id" "SSHKEY" + + check_file_exists "$temp_file" \ + "Temporary SSH key file not found. Please check the Key Vault secret ID." + command="ansible-playbook ${cmd_dir}/../src/$playbook_name.yml -i $system_hosts --private-key $temp_file \ + -e @$VARS_FILE -e @$system_params -e '_workspace_directory=$system_config_folder' $extra_vars" + else + local ssh_key_dir="${cmd_dir}/../WORKSPACES/SYSTEM/$SYSTEM_CONFIG_NAME" + local ssh_key="" + local extensions=("ppk" "pem" "key" "private" "rsa" "ed25519" "ecdsa" "dsa" "") + + for ext in "${extensions[@]}"; do + if [[ -n "$ext" ]]; then + local key_file="${ssh_key_dir}/ssh_key.${ext}" + else + local key_file="${ssh_key_dir}/ssh_key" + fi + + if [[ -f "$key_file" ]]; then + ssh_key="$key_file" + log "INFO" "Found SSH key file: $ssh_key" + break + fi + done + + if [[ -z "$ssh_key" ]]; then + ssh_key=$(find "$ssh_key_dir" -name "*ssh_key*" -type f | head -n 1) + if [[ -n "$ssh_key" ]]; then + log "INFO" "Found SSH key file with pattern: $ssh_key" + fi fi - if [[ -f "$key_file" ]]; then - ssh_key="$key_file" - log "INFO" "Found SSH key file: $ssh_key" - break - fi - done + check_file_exists "$ssh_key" \ + "SSH key file not found in WORKSPACES/SYSTEM/$SYSTEM_CONFIG_NAME directory. Looked for files with patterns: ssh_key.*, *ssh_key*" - if [[ -z "$ssh_key" ]]; then - ssh_key=$(find "$ssh_key_dir" -name "*ssh_key*" -type f | head -n 1) - if [[ -n "$ssh_key" ]]; then - log "INFO" "Found SSH key file with pattern: $ssh_key" - fi + chmod 600 "$ssh_key" + command="ansible-playbook ${cmd_dir}/../src/$playbook_name.yml -i $system_hosts --private-key $ssh_key \ + -e @$VARS_FILE -e @$system_params -e '_workspace_directory=$system_config_folder' $extra_vars" fi - check_file_exists "$ssh_key" \ - "SSH key file not found in WORKSPACES/SYSTEM/$SYSTEM_CONFIG_NAME directory. Looked for files with patterns: ssh_key.*, *ssh_key*" - - chmod 600 "$ssh_key" - command="ansible-playbook ${cmd_dir}/../src/$playbook_name.yml -i $system_hosts --private-key $ssh_key \ - -e @$VARS_FILE -e @$system_params -e '_workspace_directory=$system_config_folder' $extra_vars" - fi - - elif [[ "$auth_type" == "VMPASSWORD" ]]; then - log "INFO" "Authentication type is VMPASSWORD." 
- - if [[ -n "$key_vault_id" && -n "$secret_id" ]]; then - log "INFO" "Key Vault ID and Secret ID are set. Retrieving VM password from Key Vault." - retrieve_secret_from_key_vault "$key_vault_id" "$secret_id" "VMPASSWORD" + elif [[ "$auth_type" == "VMPASSWORD" ]]; then + log "INFO" "Authentication type is VMPASSWORD." + + if [[ -n "$key_vault_id" && -n "$secret_id" ]]; then + log "INFO" "Key Vault ID and Secret ID are set. Retrieving VM password from Key Vault." + retrieve_secret_from_key_vault "$key_vault_id" "$secret_id" "VMPASSWORD" + + check_file_exists "$temp_file" \ + "Temporary password file not found. Please check the Key Vault secret ID." + command="ansible-playbook ${cmd_dir}/../src/$playbook_name.yml -i $system_hosts \ + --extra-vars \"ansible_ssh_pass=$(cat $temp_file)\" --extra-vars @$VARS_FILE -e @$system_params \ + -e '_workspace_directory=$system_config_folder' $extra_vars" + else + local password_file="${cmd_dir}/../WORKSPACES/SYSTEM/$SYSTEM_CONFIG_NAME/password" + check_file_exists "$password_file" \ + "password file not found in WORKSPACES/SYSTEM/$SYSTEM_CONFIG_NAME directory." + command="ansible-playbook ${cmd_dir}/../src/$playbook_name.yml -i $system_hosts \ + --extra-vars \"ansible_ssh_pass=$(cat $password_file)\" --extra-vars @$VARS_FILE -e @$system_params \ + -e '_workspace_directory=$system_config_folder' $extra_vars" + fi - check_file_exists "$temp_file" \ - "Temporary SSH key file not found. Please check the Key Vault secret ID." - command="ansible-playbook ${cmd_dir}/../src/$playbook_name.yml -i $system_hosts \ - --extra-vars \"ansible_ssh_pass=$(cat $temp_file)\" --extra-vars @$VARS_FILE -e @$system_params \ - -e '_workspace_directory=$system_config_folder'" else - local password_file="${cmd_dir}/../WORKSPACES/SYSTEM/$SYSTEM_CONFIG_NAME/password" - check_file_exists "$password_file" \ - "password file not found in WORKSPACES/SYSTEM/$SYSTEM_CONFIG_NAME directory." - command="ansible-playbook ${cmd_dir}/../src/$playbook_name.yml -i $system_hosts \ - --extra-vars \"ansible_ssh_pass=$(cat $password_file)\" --extra-vars @$VARS_FILE -e @$system_params \ - -e '_workspace_directory=$system_config_folder'" + log "ERROR" "Unknown authentication type: $auth_type" + exit 1 fi - - else - log "ERROR" "Unknown authentication type: $auth_type" - exit 1 fi - # Add verbosity if specified - if [[ -n "$ANSIBLE_VERBOSE" ]]; then - command+=" $ANSIBLE_VERBOSE" - fi + # Add verbosity if specified + if [[ -n "$ANSIBLE_VERBOSE" ]]; then + command+=" $ANSIBLE_VERBOSE" + fi log "INFO" "Running ansible playbook... Command: $command" eval $command @@ -420,7 +426,6 @@ main() { log "INFO" "Activate the virtual environment..." set -e - # Parse command line arguments parse_arguments "$@" if [[ -n "$TEST_GROUPS" ]]; then @@ -429,6 +434,9 @@ main() { if [[ -n "$TEST_CASES" ]]; then log "INFO" "Test cases specified: $TEST_CASES" fi + if [[ "$OFFLINE_MODE" == "true" ]]; then + log "INFO" "Offline mode enabled - using previously collected CIB data" + fi # Validate parameters validate_params @@ -448,7 +456,23 @@ main() { check_file_exists "$SYSTEM_PARAMS" \ "sap-parameters.yaml not found in WORKSPACES/SYSTEM/$SYSTEM_CONFIG_NAME directory." - playbook_name=$(get_playbook_name "$sap_functional_test_type") + if [[ "$OFFLINE_MODE" == "true" ]]; then + local crm_report_dir="$SYSTEM_CONFIG_FOLDER/offline_validation" + if [[ ! -d "$crm_report_dir" ]]; then + log "ERROR" "Offline mode requires CIB data in $crm_report_dir directory. Please run online tests first to collect CIB data." 
+ exit 1 + fi + + local cib_files=$(find "$crm_report_dir" -name "cib" -type f 2>/dev/null | wc -l) + if [[ "$cib_files" -eq 0 ]]; then + log "ERROR" "No CIB files found in $crm_report_dir. Please run online tests first to collect CIB data." + exit 1 + fi + + log "INFO" "Found $cib_files CIB file(s) for offline analysis" + fi + + playbook_name=$(get_playbook_name "$sap_functional_test_type" "$OFFLINE_MODE") log "INFO" "Using playbook: $playbook_name." run_ansible_playbook "$playbook_name" "$SYSTEM_HOSTS" "$SYSTEM_PARAMS" "$AUTHENTICATION_TYPE" "$SYSTEM_CONFIG_FOLDER" diff --git a/scripts/setup.sh b/scripts/setup.sh index a47a65d0..47b6a249 100755 --- a/scripts/setup.sh +++ b/scripts/setup.sh @@ -4,55 +4,26 @@ # Licensed under the MIT License. set -euo pipefail -# Function to check if a command exists -command_exists() { - command -v "$1" &> /dev/null -} -export ANSIBLE_HOST_KEY_CHECKING=False - -RED='\033[0;31m' -GREEN='\033[0;32m' -NC='\033[0m' - -# Function to print logs with color based on severity -log() { - local severity=$1 - local message=$2 - - if [[ "$severity" == "ERROR" ]]; then - echo -e "${RED}[ERROR] $message${NC}" - else - echo -e "${GREEN}[INFO] $message${NC}" - fi -} -# Check if ansible is installed, if not, install it -install_packages() { - local packages=("$@") - local to_install=() - for package in "${packages[@]}"; do - if ! command_exists "$package"; then - log "INFO" "$package is not installed. Adding to install list..." - to_install+=("$package") - else - log "INFO" "$package is already installed." - fi - done +# Source the utils script for logging and utility functions +script_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +source "${script_dir}/utils.sh" +set_output_context - if [ ${#to_install[@]} -ne 0 ]; then - log "INFO" "Updating package list and installing missing packages..." - if sudo apt update -y && sudo apt install -y "${to_install[@]}"; then - log "INFO" "Packages installed successfully." - else - log "ERROR" "Failed to install packages." - fi - fi -} +# Ensure we're in the project root directory +cd "$(dirname "$script_dir")" packages=("python3-pip" "ansible" "sshpass" "python3-venv") - install_packages "${packages[@]}" +# Verify Python3 is available +if ! command_exists python3; then + log "ERROR" "Python3 is not available after installation. Please install Python3 manually." + exit 1 +fi + + +# Create virtual environment if it doesn't exist if [ ! -d ".venv" ]; then log "INFO" "Creating Python virtual environment..." if python3 -m venv .venv; then @@ -73,7 +44,10 @@ else fi log "INFO" "Installing Python packages..." -if pip install azure-kusto-data azure-kusto-ingest; then +if ! pip install --upgrade pip; then + log "ERROR" "Failed to upgrade pip." +fi +if pip install pyyaml requests azure-identity azure-kusto-data azure-kusto-ingest azure-mgmt-network azure-storage-blob azure-storage-queue; then log "INFO" "Python packages installed successfully." else log "ERROR" "Failed to install Python packages." @@ -81,4 +55,9 @@ fi log "INFO" "Which Python: $(which python)" +export ANSIBLE_HOST_KEY_CHECKING=False export ANSIBLE_PYTHON_INTERPRETER=$(which python3) + +log "INFO" "Setup completed successfully!" 
+log "INFO" "Virtual environment is located at: $(pwd)/.venv" +log "INFO" "To activate the virtual environment manually, run: source .venv/bin/activate" diff --git a/scripts/utils.sh b/scripts/utils.sh new file mode 100644 index 00000000..2600e47b --- /dev/null +++ b/scripts/utils.sh @@ -0,0 +1,235 @@ +#!/bin/bash + +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. + +# Set color codes for output +set_output_context() { + RED='\033[0;31m' + GREEN='\033[0;32m' + NC='\033[0m' +} + +# Print logs with color based on severity. +# :param severity: The severity level of the log (e.g., "INFO", "ERROR"). +# :param message: The message to log. +log() { + local severity=$1 + local message=$2 + + if [[ "$severity" == "ERROR" ]]; then + echo -e "${RED}[ERROR] $message${NC}" + else + echo -e "${GREEN}[INFO] $message${NC}" + fi +} + +# Check if a command exists. +# :param command: The command to check. +# :return: None. Exits with a non-zero status if the command does not exist. +command_exists() { + command -v "$1" &> /dev/null +} + + +# Check if a file exists. +# :param file_path: The path to the file to check. +# :param error_message: The error message to display if the file does not exist. +# :return: None. Exits with a non-zero status if the file does not exist. +check_file_exists() { + local file_path=$1 + local error_message=$2 + log "INFO" "Checking if file exists: $file_path" + if [[ ! -f "$file_path" ]]; then + log "ERROR" "Error: $error_message" + exit 1 + fi +} + +# Detect the Linux distribution +detect_distro() { + if [[ -f /etc/os-release ]]; then + . /etc/os-release + DISTRO=$ID + DISTRO_FAMILY=$ID_LIKE + elif command_exists lsb_release; then + DISTRO=$(lsb_release -si | tr '[:upper:]' '[:lower:]') + elif [[ -f /etc/redhat-release ]]; then + DISTRO="rhel" + elif [[ -f /etc/debian_version ]]; then + DISTRO="debian" + elif [[ -f /etc/SuSE-release ]]; then + DISTRO="suse" + else + log "ERROR" "Cannot detect Linux distribution" + exit 1 + fi + case "$DISTRO" in + ubuntu|debian) + DISTRO_FAMILY="debian" + ;; + rhel|centos|fedora|rocky|almalinux) + DISTRO_FAMILY="rhel" + ;; + opensuse*|sles|suse) + DISTRO_FAMILY="suse" + ;; + *) + if [[ -n "$DISTRO_FAMILY" ]]; then + case "$DISTRO_FAMILY" in + *debian*) + DISTRO_FAMILY="debian" + ;; + *rhel*|*fedora*) + DISTRO_FAMILY="rhel" + ;; + *suse*) + DISTRO_FAMILY="suse" + ;; + esac + else + log "ERROR" "Unsupported Linux distribution: $DISTRO" + exit 1 + fi + ;; + esac + + log "INFO" "Detected distribution: $DISTRO (family: $DISTRO_FAMILY)" +} + +# Get package manager commands based on distribution +get_package_manager_commands() { + detect_distro + + case "$DISTRO_FAMILY" in + debian) + PKG_UPDATE="apt update -y" + PKG_INSTALL="apt install -y" + PKG_CHECK="dpkg -l" + ;; + rhel) + if command_exists dnf; then + PKG_UPDATE="dnf makecache" + PKG_INSTALL="dnf install -y" + PKG_CHECK="rpm -q" + elif command_exists yum; then + PKG_UPDATE="yum makecache" + PKG_INSTALL="yum install -y" + PKG_CHECK="rpm -q" + else + log "ERROR" "Neither dnf nor yum package manager found" + exit 1 + fi + ;; + suse) + PKG_UPDATE="zypper refresh" + PKG_INSTALL="zypper install -y" + PKG_CHECK="rpm -q" + ;; + *) + log "ERROR" "Unsupported distribution family: $DISTRO_FAMILY" + exit 1 + ;; + esac +} + +# Map generic package names to distribution-specific names +map_package_names() { + local generic_packages=("$@") + local mapped_packages=() + for package in "${generic_packages[@]}"; do + case "$package" in + python3-pip) + case "$DISTRO_FAMILY" in + debian) 
mapped_packages+=("python3-pip") ;; + rhel) mapped_packages+=("python3-pip") ;; + suse) mapped_packages+=("python3-pip") ;; + esac + ;; + ansible) + case "$DISTRO_FAMILY" in + debian) mapped_packages+=("ansible") ;; + rhel) mapped_packages+=("ansible-core") ;; + suse) mapped_packages+=("ansible") ;; + esac + ;; + sshpass) + case "$DISTRO_FAMILY" in + debian) mapped_packages+=("sshpass") ;; + rhel) mapped_packages+=("sshpass") ;; + suse) + >&2 log "INFO" "Skipping sshpass installation on SUSE systems (not available in standard repositories)" + ;; + esac + ;; + python3-venv) + case "$DISTRO_FAMILY" in + debian) mapped_packages+=("python3-venv") ;; + rhel) + >&2 log "INFO" "Skipping python3-venv installation on Red Hat systems" + ;; + suse) + >&2 log "INFO" "Skipping python3-venv installation on SUSE systems" + ;; + esac + ;; + *) + mapped_packages+=("$package") + ;; + esac + done + + echo "${mapped_packages[@]}" +} + +# Check if a package is installed +is_package_installed() { + local package=$1 + case "$DISTRO_FAMILY" in + debian) + dpkg -l "$package" &> /dev/null + ;; + rhel|suse) + rpm -q "$package" &> /dev/null + ;; + *) + return 1 + ;; + esac +} + +# Install packages with distribution-specific package manager +# :param packages: Array of package names to install. +# :return: None. Exits with a non-zero status if installation fails. +install_packages() { + local packages=("$@") + local to_install=() + get_package_manager_commands + local mapped_packages + read -ra mapped_packages <<< "$(map_package_names "${packages[@]}")" + for package in "${mapped_packages[@]}"; do + if ! command_exists "${package}" && ! is_package_installed "${package}"; then + log "INFO" "$package is not installed. Adding to install list..." + to_install+=("$package") + else + log "INFO" "$package is already installed or available." + fi + done + if [ ${#to_install[@]} -ne 0 ]; then + log "INFO" "Updating package cache and installing missing packages: ${to_install[*]}" + log "INFO" "Updating package cache..." + if ! sudo $PKG_UPDATE; then + log "ERROR" "Failed to update package cache" + exit 1 + fi + log "INFO" "Installing packages: ${to_install[*]}" + if sudo $PKG_INSTALL "${to_install[@]}"; then + log "INFO" "Packages installed successfully." + else + log "ERROR" "Failed to install packages: ${to_install[*]}" + exit 1 + fi + else + log "INFO" "All required packages are already installed." + fi +} diff --git a/src/module_utils/get_pcmk_properties.py b/src/module_utils/get_pcmk_properties.py new file mode 100644 index 00000000..733ce134 --- /dev/null +++ b/src/module_utils/get_pcmk_properties.py @@ -0,0 +1,542 @@ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. + +""" +Base Pacemaker Cluster Configuration Validator. + +This module provides base functionality to validate Pacemaker cluster configurations +against predefined standards for SAP deployments. + +Classes: + BaseHAClusterValidator: Base validator class for cluster configurations. 
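+
+Workload-specific validators (the SAP HANA and SCS modules under src/modules/)
+extend this class, overriding RESOURCE_CATEGORIES, _parse_resources_section, or
+_get_additional_parameters to add their own resource and parameter checks.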
+""" + +from abc import ABC + +try: + from ansible.module_utils.sap_automation_qa import SapAutomationQA + from ansible.module_utils.enums import OperatingSystemFamily, Parameters, TestStatus + from ansible.module_utils.commands import CIB_ADMIN +except ImportError: + from src.module_utils.sap_automation_qa import SapAutomationQA + from src.module_utils.enums import OperatingSystemFamily, Parameters, TestStatus + from src.module_utils.commands import CIB_ADMIN + + +class BaseHAClusterValidator(SapAutomationQA, ABC): + """ + Base class for validating DB/SCS High Availability cluster configurations. + + This abstract base class provides common functionality for validating + Pacemaker cluster configurations against predefined standards for SAP deployments. + It contains shared methods for parsing and validating cluster configurations. + + Attributes: + BASIC_CATEGORIES (Dict): Mapping of basic configuration categories to their XPaths + RESOURCE_CATEGORIES (Dict): Mapping of resource types to their XPaths (in subclasses) + """ + + BASIC_CATEGORIES = { + "crm_config": (".//cluster_property_set", "CRM_CONFIG_DEFAULTS"), + "rsc_defaults": (".//meta_attributes", "RSC_DEFAULTS"), + "op_defaults": (".//meta_attributes", "OP_DEFAULTS"), + } + + CONSTRAINTS_CATEGORIES = (".//*", "CONSTRAINTS_DEFAULTS") + RESOURCE_CATEGORIES = {} + + def __init__( + self, + os_type: OperatingSystemFamily, + sid: str, + virtual_machine_name: str, + constants: dict, + fencing_mechanism: str, + cib_output: str = "", + category=None, + ): + """ + Initialize the base validator. + + :param os_type: Operating system family + :type os_type: OperatingSystemFamily + :param sid: SAP System ID + :type sid: str + :param virtual_machine_name: Name of the virtual machine + :type virtual_machine_name: str + :param constants: Dictionary of constants for validation + :type constants: dict + :param fencing_mechanism: Type of fencing mechanism used + :type fencing_mechanism: str + :param category: Category being processed (optional) + :type category: str + """ + super().__init__() + self.os_type = os_type.value.upper() + self.category = category + self.sid = sid + self.virtual_machine_name = virtual_machine_name + self.fencing_mechanism = fencing_mechanism + self.constants = constants + self.cib_output = cib_output + + def _get_expected_value(self, category, name): + """ + Get expected value for a given configuration parameter. + + :param category: The category of the configuration parameter. + :type category: str + :param name: The name of the configuration parameter. + :type name: str + :return: The expected value for the configuration parameter. + :rtype: str + """ + _, defaults_key = self.BASIC_CATEGORIES[category] + + fence_config = self.constants["VALID_CONFIGS"].get(self.fencing_mechanism, {}) + os_config = self.constants["VALID_CONFIGS"].get(self.os_type, {}) + + return fence_config.get(name) or os_config.get(name, self.constants[defaults_key].get(name)) + + def _get_resource_expected_value(self, resource_type, section, param_name, op_name=None): + """ + Get expected value for a given resource configuration parameter. + + :param resource_type: The type of the resource. + :type resource_type: str + :param section: The section of the resource configuration. + :type section: str + :param param_name: The name of the configuration parameter. 
+ :type param_name: str + :param op_name: The name of the operation (if applicable), defaults to None + :type op_name: str, optional + :return: The expected value for the resource configuration parameter. + :rtype: str + """ + resource_defaults = ( + self.constants["RESOURCE_DEFAULTS"].get(self.os_type, {}).get(resource_type, {}) + ) + + if section == "meta_attributes": + return resource_defaults.get("meta_attributes", {}).get(param_name) + elif section == "operations": + ops = resource_defaults.get("operations", {}).get(op_name, {}) + return ops.get(param_name) + elif section == "instance_attributes": + return resource_defaults.get("instance_attributes", {}).get(param_name) + return None + + def _create_parameter( + self, + category, + name, + value, + expected_value=None, + id=None, + subcategory=None, + op_name=None, + ): + """ + Create a parameter dictionary for the given configuration. + + :param category: The category of the configuration parameter. + :type category: str + :param name: The name of the configuration parameter. + :type name: str + :param value: The value of the configuration parameter. + :type value: str + :param expected_value: The expected value for the configuration parameter, defaults to None + :type expected_value: str, optional + :param id: The ID of the configuration parameter, defaults to None + :type id: str, optional + :param subcategory: The subcategory of the configuration parameter, defaults to None + :type subcategory: str, optional + :param op_name: The name of the operation (if applicable), defaults to None + :type op_name: str, optional + :return: A dictionary representing the parameter. + :rtype: dict + """ + if expected_value is None: + expected_value = self._get_expected_value_for_category( + category, subcategory, name, op_name + ) + + status = self._determine_parameter_status(value, expected_value) + + if isinstance(expected_value, list): + expected_value = expected_value[0] if expected_value else "" + + return Parameters( + category=f"{category}_{subcategory}" if subcategory else category, + id=id if id else "", + name=name if not op_name else f"{op_name}_{name}", + value=value, + expected_value=expected_value if expected_value is not None else "", + status=status if status else TestStatus.ERROR.value, + ).to_dict() + + def _get_expected_value_for_category(self, category, subcategory, name, op_name): + """ + Get expected value based on category type. + This method can be overridden by subclasses for custom logic. + + :param category: The category of the configuration parameter. + :type category: str + :param subcategory: The subcategory of the configuration parameter. + :type subcategory: str + :param name: The name of the configuration parameter. + :type name: str + :param op_name: The name of the operation (if applicable). + :type op_name: str + :return: The expected value for the configuration parameter. + :rtype: str or list or dict + """ + if category in self.RESOURCE_CATEGORIES: + return self._get_resource_expected_value( + resource_type=category, + section=subcategory, + param_name=name, + op_name=op_name, + ) + else: + return self._get_expected_value(category, name) + + def _determine_parameter_status(self, value, expected_value): + """ + Determine the status of a parameter based on its value and expected value. + + :param value: The actual value of the parameter. + :type value: str + :param expected_value: The expected value of the parameter. + :type expected_value: str or list or dict + :return: The status of the parameter. 
+ :rtype: str + """ + if expected_value is None or value == "": + return TestStatus.INFO.value + elif isinstance(expected_value, (str, list)): + if isinstance(expected_value, list): + return ( + TestStatus.SUCCESS.value + if str(value) in expected_value + else TestStatus.ERROR.value + ) + else: + return ( + TestStatus.SUCCESS.value + if str(value) == str(expected_value) + else TestStatus.ERROR.value + ) + else: + return TestStatus.ERROR.value + + def _parse_nvpair_elements(self, elements, category, subcategory=None, op_name=None): + """ + Parse nvpair elements and create parameter dictionaries. + + :param elements: List of nvpair elements to parse. + :type elements: list + :param category: The category of the configuration parameter. + :type category: str + :param subcategory: The subcategory of the configuration parameter, defaults to None + :type subcategory: str, optional + :param op_name: The name of the operation (if applicable), defaults to None + :type op_name: str, optional + :return: A list of parameter dictionaries. + :rtype: list + """ + parameters = [] + for nvpair in elements: + name = nvpair.get("name", "") + if name in ["passwd", "password", "login"]: + continue + else: + parameters.append( + self._create_parameter( + category=category, + subcategory=subcategory, + op_name=op_name, + id=nvpair.get("id", ""), + name=name, + value=nvpair.get("value", ""), + ) + ) + return parameters + + def _parse_os_parameters(self): + """ + Parse and validate OS-specific configuration parameters. + + :return: A list of parameter dictionaries containing validation results. + :rtype: list + """ + parameters = [] + + os_parameters = self.constants["OS_PARAMETERS"].get("DEFAULTS", {}) + + for section, params in os_parameters.items(): + for param_name, expected_value in params.items(): + value = ( + self.execute_command_subprocess(command=[section, param_name]) + .strip() + .split("\n")[0] + ) + parameters.append( + self._create_parameter( + category="os", + id=section, + name=param_name, + value=value, + expected_value=expected_value, + ) + ) + + return parameters + + def _parse_basic_config(self, element, category, subcategory=None): + """ + Parse basic configuration parameters + + :param element: The XML element to parse. + :type element: xml.etree.ElementTree.Element + :param category: The category of the configuration parameter. + :type category: str + :param subcategory: The subcategory of the configuration parameter, defaults to None + :type subcategory: str, optional + :return: A list of parameter dictionaries. + :rtype: list + """ + parameters = [] + for nvpair in element.findall(".//nvpair"): + parameters.append( + self._create_parameter( + category=category, + subcategory=subcategory, + name=nvpair.get("name", ""), + value=nvpair.get("value", ""), + id=nvpair.get("id", ""), + ) + ) + return parameters + + def _parse_resource(self, element, category): + """ + Parse resource-specific configuration parameters + + :param element: The XML element to parse. + :type element: xml.etree.ElementTree.Element + :param category: The category of the resource. + :type category: str + :return: A list of parameter dictionaries. 
+ :rtype: list + """ + parameters = [] + if category.endswith("_meta"): + param_dict = self._parse_nvpair_elements( + elements=element.findall(".//nvpair"), + category=category.split("_")[0], + subcategory="meta_attributes", + ) + parameters.extend(param_dict) + + for attr in ["meta_attributes", "instance_attributes"]: + attr_elements = element.find(f".//{attr}") + if attr_elements is not None: + parameters.extend( + self._parse_nvpair_elements( + elements=attr_elements.findall(".//nvpair"), + category=category, + subcategory=attr, + ) + ) + + operations = element.find(".//operations") + if operations is not None: + for operation in operations.findall(".//op"): + for op_type in ["timeout", "interval"]: + parameters.append( + self._create_parameter( + category=category, + subcategory="operations", + id=operation.get("id", ""), + name=op_type, + op_name=operation.get("name", ""), + value=operation.get(op_type, ""), + ) + ) + return parameters + + def _parse_constraints(self, root): + """ + Parse constraints configuration parameters + + :param root: The XML root element to parse. + :type root: xml.etree.ElementTree.Element + :return: A list of parameter dictionaries. + :rtype: list + """ + parameters = [] + for element in root: + tag = element.tag + if tag in self.constants["CONSTRAINTS"]: + for attr, expected in self.constants["CONSTRAINTS"][tag].items(): + if element.get(attr) is not None: + parameters.append( + self._create_parameter( + category="constraints", + subcategory=tag, + id=element.get("id", ""), + name=attr, + value=element.get(attr), + expected_value=expected, + ) + ) + else: + continue + else: + continue + return parameters + + def _parse_resources_section(self, root): + """ + Parse resources section - can be overridden by subclasses for custom resource parsing. + + :param root: The XML root element to parse. + :type root: xml.etree.ElementTree.Element + :return: A list of parameter dictionaries. + :rtype: list + """ + parameters = [] + for sub_category, xpath in self.RESOURCE_CATEGORIES.items(): + elements = root.findall(xpath) + for element in elements: + parameters.extend(self._parse_resource(element, sub_category)) + return parameters + + def _get_additional_parameters(self): + """ + Get additional parameters specific to subclasses. + This method should be overridden by subclasses to add their specific parameters. + + :return: A list of additional parameter dictionaries. + :rtype: list + """ + return [] + + def _should_skip_scope(self, scope): + """ + Determine if a scope should be skipped. + Can be overridden by subclasses for custom logic. + + :param scope: The scope to check. + :type scope: str + :return: True if scope should be skipped, False otherwise. + :rtype: bool + """ + return scope == "op_defaults" and self.os_type == OperatingSystemFamily.REDHAT.value.upper() + + def _get_scope_from_cib(self, scope): + """ + Extract specific scope data from loaded CIB data. 
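+
+        Used in offline mode, where a previously collected ``cibadmin --query``
+        dump supplied via ``cib_output`` stands in for the live per-scope
+        queries issued by parse_ha_cluster_config.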
+ + :param scope: The scope to extract (e.g., 'resources', 'constraints') + :type scope: str + :return: XML element for the scope + :rtype: xml.etree.ElementTree.Element or None + """ + if self.cib_output: + self.cib_output = ( + self.parse_xml_output(self.cib_output) + if isinstance(self.cib_output, str) + else self.cib_output + ) + else: + return None + + scope_mappings = { + "resources": ".//resources", + "constraints": ".//constraints", + "crm_config": ".//crm_config", + "rsc_defaults": ".//rsc_defaults", + "op_defaults": ".//op_defaults", + } + + xpath = scope_mappings.get(scope) + if xpath: + return self.cib_output.find(xpath) + return None + + def parse_ha_cluster_config(self): + """ + Parse HA cluster configuration XML and return a list of properties. + This is the main orchestration method that coordinates all parsing activities. + """ + parameters = [] + + scopes = [ + "rsc_defaults", + "crm_config", + "op_defaults", + "constraints", + "resources", + ] + + for scope in scopes: + if self._should_skip_scope(scope): + continue + + self.category = scope + if self.cib_output: + root = self._get_scope_from_cib(scope) + else: + root = self.parse_xml_output( + self.execute_command_subprocess(CIB_ADMIN(scope=scope)) + ) + if not root: + continue + + try: + if self.category in self.BASIC_CATEGORIES: + xpath = self.BASIC_CATEGORIES[self.category][0] + for element in root.findall(xpath): + parameters.extend(self._parse_basic_config(element, self.category)) + + elif self.category == "resources": + parameters.extend(self._parse_resources_section(root)) + + elif self.category == "constraints": + parameters.extend(self._parse_constraints(root)) + + except Exception as ex: + self.result["message"] += f"Failed to get {self.category} configuration: {str(ex)}" + continue + try: + if not self.cib_output: + parameters.extend(self._parse_os_parameters()) + else: + self.result["message"] += "CIB output provided, skipping OS parameters parsing. " + except Exception as ex: + self.result["message"] += f"Failed to get OS parameters: {str(ex)} \n" + try: + if not self.cib_output: + parameters.extend(self._get_additional_parameters()) + else: + self.result[ + "message" + ] += "CIB output provided, skipping additional parameters parsing. " + except Exception as ex: + self.result["message"] += f"Failed to get additional parameters: {str(ex)} \n" + failed_parameters = [ + param + for param in parameters + if param.get("status", TestStatus.ERROR.value) == TestStatus.ERROR.value + ] + self.result.update( + { + "details": {"parameters": parameters}, + "status": ( + TestStatus.ERROR.value if failed_parameters else TestStatus.SUCCESS.value + ), + } + ) + self.result["message"] += "HA Parameter Validation completed successfully. " diff --git a/src/module_utils/sap_automation_qa.py b/src/module_utils/sap_automation_qa.py index 41fb4091..4dc6dae2 100644 --- a/src/module_utils/sap_automation_qa.py +++ b/src/module_utils/sap_automation_qa.py @@ -108,18 +108,18 @@ def execute_command_subprocess(self, command: Any, shell_command: bool = False) self.handle_error(ex, "") return "" - def parse_xml_output(self, xml_output: str) -> Optional[ET.Element]: + def parse_xml_output(self, xml_output: str) -> ET.Element: """ Parses the XML output and returns the root element. 
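+
+        Non-XML input yields an empty ``<root/>`` element rather than ``None``,
+        so callers can query the result without a ``None`` check.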
:param xml_output: XML output to parse :type xml_output: str :return: The root element of the XML output - :rtype: Optional[ET.Element] + :rtype: ET.Element """ if xml_output.startswith("<"): return ET.fromstring(xml_output) - return None + return ET.Element("root") def get_result(self) -> Dict[str, Any]: """ diff --git a/src/modules/get_pcmk_properties_db.py b/src/modules/get_pcmk_properties_db.py index f044b302..2eeedb15 100644 --- a/src/modules/get_pcmk_properties_db.py +++ b/src/modules/get_pcmk_properties_db.py @@ -16,18 +16,11 @@ from ansible.module_utils.facts.compat import ansible_facts try: - from ansible.module_utils.sap_automation_qa import SapAutomationQA - from ansible.module_utils.enums import ( - OperatingSystemFamily, - Parameters, - TestStatus, - HanaSRProvider, - ) - from ansible.module_utils.commands import CIB_ADMIN + from ansible.module_utils.get_pcmk_properties import BaseHAClusterValidator + from ansible.module_utils.enums import OperatingSystemFamily, HanaSRProvider except ImportError: - from src.module_utils.sap_automation_qa import SapAutomationQA - from src.module_utils.enums import OperatingSystemFamily, Parameters, TestStatus, HanaSRProvider - from src.module_utils.commands import CIB_ADMIN + from src.module_utils.get_pcmk_properties import BaseHAClusterValidator + from src.module_utils.enums import OperatingSystemFamily, HanaSRProvider DOCUMENTATION = r""" --- @@ -59,11 +52,6 @@ - Type of fencing mechanism used type: str required: true - os_version: - description: - - Operating system version - type: str - required: true pcmk_constants: description: - Dictionary of constants for validation @@ -74,6 +62,11 @@ - SAP HANA SR provider type (e.g., SAPHanaSR, SAPHanaSR-angi) type: str required: true + cib_output: + description: + - Output from cibadmin command to query Pacemaker configuration + type: str + required: false author: - Microsoft Corporation notes: @@ -92,7 +85,6 @@ instance_number: "00" virtual_machine_name: "{{ ansible_hostname }}" fencing_mechanism: "sbd" - os_version: "{{ ansible_distribution_version }}" pcmk_constants: "{{ pcmk_validation_constants }}" register: pcmk_validation_result @@ -155,27 +147,15 @@ """ -class HAClusterValidator(SapAutomationQA): +class HAClusterValidator(BaseHAClusterValidator): """ - Validates High Availability cluster configurations. - - This class validates Pacemaker cluster configurations against predefined - standards for SAP HANA deployments. It checks both basic cluster properties - and resource-specific configurations. + Validates High Availability cluster configurations for SAP HANA. - Attributes: - BASIC_CATEGORIES (Dict): Mapping of basic configuration categories to their XPaths - RESOURCE_CATEGORIES (Dict): Mapping of resource types to their XPaths + This class extends BaseHAClusterValidator to provide HANA-specific validation + functionality including global.ini parameter validation and HANA-specific + resource configurations. 
""" - BASIC_CATEGORIES = { - "crm_config": (".//cluster_property_set", "CRM_CONFIG_DEFAULTS"), - "rsc_defaults": (".//meta_attributes", "RSC_DEFAULTS"), - "op_defaults": (".//meta_attributes", "OP_DEFAULTS"), - } - - CONSTRAINTS_CATEGORIES = (".//*", "CONSTRAINTS_DEFAULTS") - RESOURCE_CATEGORIES = { "sbd_stonith": ".//primitive[@type='external/sbd']", "fence_agent": ".//primitive[@type='fence_azure_arm']", @@ -194,208 +174,54 @@ class HAClusterValidator(SapAutomationQA): def __init__( self, os_type: OperatingSystemFamily, - os_version: str, sid: str, instance_number: str, fencing_mechanism: str, virtual_machine_name: str, constants: dict, saphanasr_provider: HanaSRProvider, + cib_output: str, category=None, ): - super().__init__() - self.os_type = os_type.value.upper() - self.os_version = os_version - self.category = category - self.sid = sid + super().__init__( + os_type=os_type, + sid=sid, + virtual_machine_name=virtual_machine_name, + constants=constants, + fencing_mechanism=fencing_mechanism, + category=category, + cib_output=cib_output, + ) self.instance_number = instance_number - self.fencing_mechanism = fencing_mechanism - self.virtual_machine_name = virtual_machine_name - self.constants = constants self.saphanasr_provider = saphanasr_provider self.parse_ha_cluster_config() - def _get_expected_value(self, category, name): + def _parse_resources_section(self, root): """ - Get expected value for a given configuration parameter. - - :param category: The category of the configuration parameter. - :type category: str - :param name: The name of the configuration parameter. - :type name: str - :return: The expected value for the configuration parameter. - :rtype: str - """ - _, defaults_key = self.BASIC_CATEGORIES[category] - - fence_config = self.constants["VALID_CONFIGS"].get(self.fencing_mechanism, {}) - os_config = self.constants["VALID_CONFIGS"].get(self.os_type, {}) - - return fence_config.get(name) or os_config.get(name, self.constants[defaults_key].get(name)) + Parse resources section with HANA-specific logic. - def _get_resource_expected_value(self, resource_type, section, param_name, op_name=None): - """ - Get expected value for a given resource configuration parameter. - - :param resource_type: The type of the resource. - :type resource_type: str - :param section: The section of the resource configuration. - :type section: str - :param param_name: The name of the configuration parameter. - :type param_name: str - :param op_name: The name of the operation (if applicable), defaults to None - :type op_name: str, optional - :return: The expected value for the resource configuration parameter. - :rtype: str - """ - resource_defaults = ( - self.constants["RESOURCE_DEFAULTS"].get(self.os_type, {}).get(resource_type, {}) - ) - - if section == "meta_attributes": - return resource_defaults.get("meta_attributes", {}).get(param_name) - elif section == "operations": - ops = resource_defaults.get("operations", {}).get(op_name, {}) - return ops.get(param_name) - elif section == "instance_attributes": - return resource_defaults.get("instance_attributes", {}).get(param_name) - return None - - def _create_parameter( - self, - category, - name, - value, - expected_value=None, - id=None, - subcategory=None, - op_name=None, - ): - """ - Create a parameter dictionary for the given configuration. - - :param category: The category of the configuration parameter. - :type category: str - :param name: The name of the configuration parameter. 
- :type name: str - :param value: The value of the configuration parameter. - :type value: str - :param expected_value: The expected value for the configuration parameter, defaults to None - :type expected_value: str, optional - :param id: The ID of the configuration parameter, defaults to None - :type id: str, optional - :param subcategory: The subcategory of the configuration parameter, defaults to None - :type subcategory: str, optional - :param op_name: The name of the operation (if applicable), defaults to None - :type op_name: str, optional - :return: A dictionary representing the parameter. - :rtype: dict - """ - if expected_value is None: - if category in self.RESOURCE_CATEGORIES: - expected_value = self._get_resource_expected_value( - resource_type=category, - section=subcategory, - param_name=name, - op_name=op_name, - ) - else: - expected_value = self._get_expected_value(category, name) - - if expected_value is None or value == "": - status = TestStatus.INFO.value - elif isinstance(expected_value, (str, list)): - if isinstance(expected_value, list): - status = ( - TestStatus.SUCCESS.value - if str(value) in expected_value - else TestStatus.ERROR.value - ) - expected_value = expected_value[0] - else: - status = ( - TestStatus.SUCCESS.value - if str(value) == str(expected_value) - else TestStatus.ERROR.value - ) - else: - status = TestStatus.ERROR.value - - return Parameters( - category=f"{category}_{subcategory}" if subcategory else category, - id=id if id else "", - name=name if not op_name else f"{op_name}_{name}", - value=value, - expected_value=expected_value if expected_value is not None else "", - status=status if status else TestStatus.ERROR.value, - ).to_dict() - - def _parse_nvpair_elements(self, elements, category, subcategory=None, op_name=None): - """ - Parse nvpair elements and create parameter dictionaries. - - :param elements: List of nvpair elements to parse. - :type elements: list - :param category: The category of the configuration parameter. - :type category: str - :param subcategory: The subcategory of the configuration parameter, defaults to None - :type subcategory: str, optional - :param op_name: The name of the operation (if applicable), defaults to None - :type op_name: str, optional + :param root: The XML root element to parse. + :type root: xml.etree.ElementTree.Element :return: A list of parameter dictionaries. :rtype: list """ parameters = [] - for nvpair in elements: - name = nvpair.get("name", "") - if name in ["passwd", "password", "login"]: - continue - else: - parameters.append( - self._create_parameter( - category=category, - subcategory=subcategory, - op_name=op_name, - id=nvpair.get("id", ""), - name=name, - value=nvpair.get("value", ""), - ) - ) - return parameters - - def _parse_os_parameters(self): - """ - Parse and validate OS-specific configuration parameters. - - :return: A list of parameter dictionaries containing validation results. 
- :rtype: list - """ - parameters = [] + resource_categories = self.RESOURCE_CATEGORIES.copy() + if self.saphanasr_provider == HanaSRProvider.ANGI: + resource_categories.pop("topology", None) + else: + resource_categories.pop("angi_topology", None) - os_parameters = self.constants["OS_PARAMETERS"].get("DEFAULTS", {}) - - for section, params in os_parameters.items(): - for param_name, expected_value in params.items(): - value = ( - self.execute_command_subprocess(command=[section, param_name]) - .strip() - .split("\n")[0] - ) - parameters.append( - self._create_parameter( - category="os", - id=section, - name=param_name, - value=value, - expected_value=expected_value, - ) - ) + for sub_category, xpath in resource_categories.items(): + elements = root.findall(xpath) + for element in elements: + parameters.extend(self._parse_resource(element, sub_category)) return parameters def _parse_global_ini_parameters(self): """ - Parse global.ini parameters + Parse global.ini parameters specific to SAP HANA. :return: A list of parameter dictionaries containing validation results. :rtype: list @@ -406,260 +232,108 @@ def _parse_global_ini_parameters(self): .get(self.os_type, {}) .get(self.saphanasr_provider.value, {}) ) - with open( - f"/usr/sap/{self.sid}/SYS/global/hdb/custom/config/global.ini", - "r", - encoding="utf-8", - ) as file: - global_ini_content = file.read().splitlines() - section_start = ( - global_ini_content.index("[ha_dr_provider_sushanasr]") - if self.saphanasr_provider == HanaSRProvider.ANGI - else global_ini_content.index("[ha_dr_provider_SAPHanaSR]") - ) - properties_slice = global_ini_content[section_start + 1 : section_start + 4] - - global_ini_properties = { - key.strip(): val.strip() - for line in properties_slice - for key, sep, val in [line.partition("=")] - if sep - } - - for param_name, expected_value in global_ini_defaults.items(): - value = global_ini_properties.get(param_name, "") - if isinstance(expected_value, list): - if value in expected_value: - expected_value = value - self.log( - logging.INFO, - f"param_name: {param_name}, value: {value}, expected_value: {expected_value}", - ) - parameters.append( - self._create_parameter( - category="global_ini", - name=param_name, - value=value, - expected_value=expected_value, - ) - ) - return parameters - - def _parse_basic_config(self, element, category, subcategory=None): - """ - Parse basic configuration parameters - - :param element: The XML element to parse. - :type element: xml.etree.ElementTree.Element - :param category: The category of the configuration parameter. - :type category: str - :param subcategory: The subcategory of the configuration parameter, defaults to None - :type subcategory: str, optional - :return: A list of parameter dictionaries. 
- :rtype: list - """ - parameters = [] - for nvpair in element.findall(".//nvpair"): - parameters.append( - self._create_parameter( - category=category, - subcategory=subcategory, - name=nvpair.get("name", ""), - value=nvpair.get("value", ""), - id=nvpair.get("id", ""), - ) + try: + with open( + f"/usr/sap/{self.sid}/SYS/global/hdb/custom/config/global.ini", + "r", + encoding="utf-8", + ) as file: + global_ini_content = file.read().splitlines() + + section_start = ( + global_ini_content.index("[ha_dr_provider_sushanasr]") + if self.saphanasr_provider == HanaSRProvider.ANGI + else global_ini_content.index("[ha_dr_provider_SAPHanaSR]") ) - return parameters + properties_slice = global_ini_content[section_start + 1 : section_start + 4] - def _parse_resource(self, element, category): - """ - Parse resource-specific configuration parameters + global_ini_properties = { + key.strip(): val.strip() + for line in properties_slice + for key, sep, val in [line.partition("=")] + if sep + } - :param element: The XML element to parse. - :type element: xml.etree.ElementTree.Element - :param category: The category of the resource. - :type category: str - :return: A list of parameter dictionaries. - :rtype: list - """ - parameters = [] + for param_name, expected_value in global_ini_defaults.items(): + value = global_ini_properties.get(param_name, "") + if isinstance(expected_value, list): + if value in expected_value: + expected_value = value - if category in ["hana_meta", "topology_meta"]: - param_dict = self._parse_nvpair_elements( - elements=element.findall(".//nvpair"), - category=category.split("_")[0], - subcategory="meta_attributes", - ) - parameters.extend(param_dict) - - for attr in ["meta_attributes", "instance_attributes"]: - attr_elements = element.find(f".//{attr}") - if attr_elements is not None: - parameters.extend( - self._parse_nvpair_elements( - elements=attr_elements.findall(".//nvpair"), - category=category, - subcategory=attr, + self.log( + logging.INFO, + f"param_name: {param_name}, value: {value}, expected_value: {expected_value}", + ) + parameters.append( + self._create_parameter( + category="global_ini", + name=param_name, + value=value, + expected_value=expected_value, ) ) + except Exception as ex: + self.log(logging.ERROR, f"Error parsing global.ini: {str(ex)}") - operations = element.find(".//operations") - if operations is not None: - for operation in operations.findall(".//op"): - for op_type in ["timeout", "interval"]: - parameters.append( - self._create_parameter( - category=category, - subcategory="operations", - id=operation.get("id", ""), - name=op_type, - op_name=operation.get("name", ""), - value=operation.get(op_type, ""), - ) - ) return parameters - def _parse_constraints(self, root): + def _get_additional_parameters(self): """ - Parse constraints configuration parameters + Get HANA-specific additional parameters (global.ini). - :param root: The XML root element to parse. - :type root: xml.etree.ElementTree.Element - :return: A list of parameter dictionaries. + :return: A list of global.ini parameter dictionaries. 
:rtype: list """ - parameters = [] - for element in root: - tag = element.tag - if tag in self.constants["CONSTRAINTS"]: - for attr, expected in self.constants["CONSTRAINTS"][tag].items(): - if element.get(attr) is not None: - parameters.append( - self._create_parameter( - category="constraints", - subcategory=tag, - id=element.get("id", ""), - name=attr, - value=element.get(attr), - expected_value=expected, - ) - ) - else: - continue - else: - continue - return parameters - - def parse_ha_cluster_config(self): - """ - Parse HA cluster configuration XML and return a list of properties. - """ - parameters = [] - - for scope in [ - "rsc_defaults", - "crm_config", - "op_defaults", - "constraints", - "resources", - ]: - if scope == "op_defaults" and self.os_type == "REDHAT": - continue - self.category = scope - root = self.parse_xml_output(self.execute_command_subprocess(CIB_ADMIN(scope=scope))) - if not root: - continue - - if self.category in self.BASIC_CATEGORIES: - try: - xpath = self.BASIC_CATEGORIES[self.category][0] - for element in root.findall(xpath): - parameters.extend(self._parse_basic_config(element, self.category)) - except Exception as ex: - self.result[ - "message" - ] += f"Failed to get {self.category} configuration: {str(ex)}" - continue - - elif self.category == "resources": - try: - resource_categories = self.RESOURCE_CATEGORIES.copy() - if self.saphanasr_provider == HanaSRProvider.ANGI: - resource_categories.pop("topology", None) - else: - resource_categories.pop("angi_topology", None) - for sub_category, xpath in resource_categories.items(): - elements = root.findall(xpath) - for element in elements: - parameters.extend(self._parse_resource(element, sub_category)) - except Exception as ex: - self.result[ - "message" - ] += f"Failed to get resources configuration for {self.category}: {str(ex)}" - continue - - elif self.category == "constraints": - try: - parameters.extend(self._parse_constraints(root)) - except Exception as ex: - self.result["message"] += f"Failed to get constraints configuration: {str(ex)}" - continue - - try: - parameters.extend(self._parse_os_parameters()) - except Exception as ex: - self.result["message"] += f"Failed to get OS parameters: {str(ex)} \n" - - try: - parameters.extend(self._parse_global_ini_parameters()) - except Exception as ex: - self.result["message"] += f"Failed to get global.ini parameters: {str(ex)} \n" - - failed_parameters = [ - param - for param in parameters - if param.get("status", TestStatus.ERROR.value) == TestStatus.ERROR.value - ] - self.result.update( - { - "details": {"parameters": parameters}, - "status": ( - TestStatus.ERROR.value if failed_parameters else TestStatus.SUCCESS.value - ), - } - ) - self.result["message"] += "HA Parameter Validation completed successfully." + return self._parse_global_ini_parameters() def main() -> None: """ Main entry point for the Ansible module. 
""" - module = AnsibleModule( - argument_spec=dict( - sid=dict(type="str"), - instance_number=dict(type="str"), - virtual_machine_name=dict(type="str"), - fencing_mechanism=dict(type="str"), - os_version=dict(type="str"), - pcmk_constants=dict(type="dict"), - saphanasr_provider=dict(type="str"), - filter=dict(type="str", required=False, default="os_family"), + + try: + module = AnsibleModule( + argument_spec=dict( + sid=dict(type="str"), + instance_number=dict(type="str"), + virtual_machine_name=dict(type="str"), + fencing_mechanism=dict(type="str"), + pcmk_constants=dict(type="dict"), + saphanasr_provider=dict(type="str"), + cib_output=dict(type="str", required=False, default=""), + os_family=dict(type="str", required=False), + filter=dict(type="str", required=False, default="os_family"), + ) ) - ) + os_family = module.params.get("os_family") or ansible_facts(module).get( + "os_family", "UNKNOWN" + ) + except Exception: + module = AnsibleModule( + argument_spec=dict( + sid=dict(type="str"), + instance_number=dict(type="str"), + virtual_machine_name=dict(type="str"), + fencing_mechanism=dict(type="str"), + pcmk_constants=dict(type="dict"), + saphanasr_provider=dict(type="str"), + cib_output=dict(type="str", required=False, default=""), + os_family=dict(type="str", required=False), + ) + ) + os_family = module.params.get("os_family", "UNKNOWN") validator = HAClusterValidator( - os_type=OperatingSystemFamily( - str(ansible_facts(module).get("os_family", "UNKNOWN")).upper() - ), - os_version=module.params["os_version"], + os_type=OperatingSystemFamily(os_family.upper()), instance_number=module.params["instance_number"], sid=module.params["sid"], virtual_machine_name=module.params["virtual_machine_name"], fencing_mechanism=module.params["fencing_mechanism"], constants=module.params["pcmk_constants"], saphanasr_provider=HanaSRProvider(module.params["saphanasr_provider"]), + cib_output=module.params.get("cib_output"), ) module.exit_json(**validator.get_result()) diff --git a/src/modules/get_pcmk_properties_scs.py b/src/modules/get_pcmk_properties_scs.py index b0cd5063..304f2005 100644 --- a/src/modules/get_pcmk_properties_scs.py +++ b/src/modules/get_pcmk_properties_scs.py @@ -15,13 +15,11 @@ from ansible.module_utils.facts.compat import ansible_facts try: - from ansible.module_utils.sap_automation_qa import SapAutomationQA - from ansible.module_utils.enums import OperatingSystemFamily, Parameters, TestStatus - from ansible.module_utils.commands import CIB_ADMIN + from ansible.module_utils.get_pcmk_properties import BaseHAClusterValidator + from ansible.module_utils.enums import OperatingSystemFamily, TestStatus except ImportError: - from src.module_utils.sap_automation_qa import SapAutomationQA - from src.module_utils.enums import OperatingSystemFamily, Parameters, TestStatus - from src.module_utils.commands import CIB_ADMIN + from src.module_utils.get_pcmk_properties import BaseHAClusterValidator + from src.module_utils.enums import OperatingSystemFamily, TestStatus DOCUMENTATION = r""" @@ -152,27 +150,14 @@ """ -class HAClusterValidator(SapAutomationQA): +class HAClusterValidator(BaseHAClusterValidator): """ - Validates High Availability cluster configurations. + Validates High Availability cluster configurations for SAP ASCS/ERS. - This class validates Pacemaker cluster configurations against predefined - standards for SAP HANA deployments. It checks both basic cluster properties - and resource-specific configurations. 
- - Attributes: - BASIC_CATEGORIES (Dict): Mapping of basic configuration categories to their XPaths - RESOURCE_CATEGORIES (Dict): Mapping of resource types to their XPaths + This class extends BaseHAClusterValidator to provide ASCS/ERS-specific validation + functionality including NFS provider handling and ASCS/ERS resource configurations. """ - BASIC_CATEGORIES = { - "crm_config": (".//cluster_property_set", "CRM_CONFIG_DEFAULTS"), - "rsc_defaults": (".//meta_attributes", "RSC_DEFAULTS"), - "op_defaults": (".//meta_attributes", "OP_DEFAULTS"), - } - - CONSTRAINTS_CATEGORIES = (".//*", "CONSTRAINTS_DEFAULTS") - RESOURCE_CATEGORIES = { "sbd_stonith": ".//primitive[@type='external/sbd']", "fence_agent": ".//primitive[@type='fence_azure_arm']", @@ -190,410 +175,161 @@ def __init__( virtual_machine_name: str, constants: dict, fencing_mechanism: str, + cib_output: str, nfs_provider=None, category=None, ): - super().__init__() - self.os_type = os_type.value.upper() - self.category = category - self.sid = sid + super().__init__( + os_type=os_type, + sid=sid, + virtual_machine_name=virtual_machine_name, + constants=constants, + fencing_mechanism=fencing_mechanism, + category=category, + cib_output=cib_output, + ) self.scs_instance_number = scs_instance_number self.ers_instance_number = ers_instance_number - self.virtual_machine_name = virtual_machine_name - self.fencing_mechanism = fencing_mechanism - self.constants = constants self.nfs_provider = nfs_provider self.parse_ha_cluster_config() - def _get_expected_value(self, category, name): + def _get_expected_value_for_category(self, category, subcategory, name, op_name): """ - Get expected value for basic configuration parameters. + Get expected value based on category type with SCS-specific logic. - :param category: Category of the parameter + :param category: The category of the configuration parameter. :type category: str - :param name: Name of the parameter + :param subcategory: The subcategory of the configuration parameter. + :type subcategory: str + :param name: The name of the configuration parameter. :type name: str - :return: Expected value of the parameter - :rtype: str - """ - _, defaults_key = self.BASIC_CATEGORIES[category] - - fence_config = self.constants["VALID_CONFIGS"].get(self.fencing_mechanism, {}) - os_config = self.constants["VALID_CONFIGS"].get(self.os_type, {}) - - return fence_config.get(name) or os_config.get(name, self.constants[defaults_key].get(name)) - - def _get_resource_expected_value(self, resource_type, section, param_name, op_name=None): - """ - Get expected value for resource-specific configuration parameters. - - :param resource_type: Type of the resource (e.g., stonith, ipaddr) - :type resource_type: str - :param section: Section of the resource (e.g., meta_attributes, operations) - :type section: str - :param param_name: Name of the parameter - :type param_name: str - :param op_name: Name of the operation (if applicable) + :param op_name: The name of the operation (if applicable). :type op_name: str - :return: Expected value of the parameter - :rtype: str + :return: The expected value for the configuration parameter. 
+ :rtype: str or list or dict """ - resource_defaults = ( - self.constants["RESOURCE_DEFAULTS"].get(self.os_type, {}).get(resource_type, {}) - ) - - if section == "meta_attributes": - return resource_defaults.get("meta_attributes", {}).get(param_name) - elif section == "operations": - ops = resource_defaults.get("operations", {}).get(op_name, {}) - return ops.get(param_name) - elif section == "instance_attributes": - return resource_defaults.get("instance_attributes", {}).get(param_name) - return None + if category in self.RESOURCE_CATEGORIES or category in ["ascs", "ers"]: + return self._get_resource_expected_value( + resource_type=category, + section=subcategory, + param_name=name, + op_name=op_name, + ) + else: + return self._get_expected_value(category, name) - def _create_parameter( - self, - category, - name, - value, - expected_value=None, - id=None, - subcategory=None, - op_name=None, - ): + def _determine_parameter_status(self, value, expected_value): """ - Create a Parameters object for a given configuration parameter. + Determine the status of a parameter with SCS-specific logic for NFS provider. - :param category: Category of the parameter - :type category: str - :param name: Name of the parameter - :type name: str - :param value: Value of the parameter + :param value: The actual value of the parameter. :type value: str - :param expected_value: Expected value of the parameter - :type expected_value: str - :param id: ID of the parameter (optional) - :type id: str - :param subcategory: Subcategory of the parameter (optional) - :type subcategory: str - :param op_name: Operation name (optional) - :type op_name: str - :return: Parameters object - :rtype: Parameters + :param expected_value: The expected value of the parameter. + :type expected_value: str or list or dict + :return: The status of the parameter. 
+ :rtype: str """ - status = None - if expected_value is None: - if category in self.RESOURCE_CATEGORIES or category in ["ascs", "ers"]: - expected_value = self._get_resource_expected_value( - resource_type=category, - section=subcategory, - param_name=name, - op_name=op_name, - ) - else: - expected_value = self._get_expected_value(category, name) - if expected_value is None or value == "": - status = TestStatus.INFO.value + return TestStatus.INFO.value elif isinstance(expected_value, (str, list)): if isinstance(expected_value, list): - status = ( + return ( TestStatus.SUCCESS.value if str(value) in expected_value else TestStatus.ERROR.value ) - expected_value = expected_value[0] else: - status = ( + return ( TestStatus.SUCCESS.value if str(value) == str(expected_value) else TestStatus.ERROR.value ) elif isinstance(expected_value, dict): - expected_value = expected_value.get(self.nfs_provider, "AFS") - status = ( - TestStatus.SUCCESS.value if str(value) in expected_value else TestStatus.ERROR.value + provider_values = expected_value.get(self.nfs_provider, expected_value.get("AFS", [])) + return ( + TestStatus.SUCCESS.value + if str(value) in provider_values + else TestStatus.ERROR.value ) - expected_value = expected_value[0] else: - status = TestStatus.ERROR.value + return TestStatus.ERROR.value - return Parameters( - category=f"{category}_{subcategory}" if subcategory else category, - id=id if id else "", - name=name if not op_name else f"{op_name}_{name}", - value=value, - expected_value=expected_value if expected_value is not None else "", - status=status if status else TestStatus.ERROR.value, - ).to_dict() - - def _parse_nvpair_elements(self, elements, category, subcategory=None, op_name=None): + def _parse_resources_section(self, root): """ - Parse nvpair elements and return a list of Parameters objects. + Parse resources section with ASCS/ERS-specific logic. - :param elements: List of XML elements to parse - :type elements: List[ElementTree.Element] - :param category: Category of the parameters - :type category: str - :param subcategory: Subcategory of the parameters - :type subcategory: str - :param op_name: Operation name (if applicable) - :type op_name: str - :return: List of Parameters objects - :rtype: List[Parameters] + :param root: The XML root element to parse. + :type root: xml.etree.ElementTree.Element + :return: A list of parameter dictionaries. 
+ :rtype: list """ parameters = [] - for nvpair in elements: - name = nvpair.get("name", "") - if name in ["passwd", "password", "login"]: - continue - else: - parameters.append( - self._create_parameter( - category=category, - subcategory=subcategory, - op_name=op_name, - id=nvpair.get("id", ""), - name=name, - value=nvpair.get("value", ""), - ) - ) - return parameters - def _parse_resource(self, element, category): - """ - Parse resource-specific configuration parameters + for sub_category, xpath in self.RESOURCE_CATEGORIES.items(): + elements = root.findall(xpath) + for element in elements: + parameters.extend(self._parse_resource(element, sub_category)) - :param element: XML element to parse - :type element: ElementTree.Element - :param category: Resource category (e.g., stonith, ipaddr) - :type category: str - :return: List of Parameters objects for the resource - :rtype: List[Parameters] - """ - parameters = [] + for group in root.findall(".//group"): + group_id = group.get("id", "") + if "ASCS" in group_id: + for element in group.findall(".//primitive[@type='SAPInstance']"): + parameters.extend(self._parse_resource(element, "ascs")) + elif "ERS" in group_id: + for element in group.findall(".//primitive[@type='SAPInstance']"): + parameters.extend(self._parse_resource(element, "ers")) - for attr in ["meta_attributes", "instance_attributes"]: - attr_elements = element.find(f".//{attr}") - if attr_elements is not None: - parameters.extend( - self._parse_nvpair_elements( - elements=attr_elements.findall(".//nvpair"), - category=category, - subcategory=attr, - ) - ) - - operations = element.find(".//operations") - if operations is not None: - for operation in operations.findall(".//op"): - for op_type in ["timeout", "interval"]: - parameters.append( - self._create_parameter( - category=category, - subcategory="operations", - id=operation.get("id", ""), - name=op_type, - op_name=operation.get("name", ""), - value=operation.get(op_type, ""), - ) - ) return parameters - def _parse_basic_config(self, element, category, subcategory=None): - """ - Parse basic configuration parameters - - :param element: XML element to parse - :type element: ElementTree.Element - :param category: Category of the parameters - :type category: str - :param subcategory: Subcategory of the parameters - :type subcategory: str - :return: List of Parameters objects for basic configuration - :rtype: List[Parameters] - """ - parameters = [] - for nvpair in element.findall(".//nvpair"): - parameters.append( - self._create_parameter( - category=category, - subcategory=subcategory, - name=nvpair.get("name", ""), - value=nvpair.get("value", ""), - id=nvpair.get("id", ""), - ) - ) - return parameters - - def _parse_os_parameters(self): - """ - Parse OS-specific parameters - - :return: List of Parameters objects for OS parameters - :rtype: List[Parameters] - """ - parameters = [] - - os_parameters = self.constants["OS_PARAMETERS"].get("DEFAULTS", {}) - - for section, params in os_parameters.items(): - for param_name, expected_value in params.items(): - value = ( - self.execute_command_subprocess(command=[section, param_name]) - .strip() - .split("\n")[0] - ) - parameters.append( - self._create_parameter( - category="os", - id=section, - name=param_name, - value=value, - expected_value=expected_value, - ) - ) - - return parameters - - def _parse_constraints(self, root): - """ - Parse constraints configuration parameters - - :param root: XML root element - :type root: ElementTree.Element - :return: List of Parameters objects for 
constraints - :rtype: List[Parameters] - """ - parameters = [] - for element in root: - tag = element.tag - if tag in self.constants["CONSTRAINTS"]: - for attr, expected in self.constants["CONSTRAINTS"][tag].items(): - if element.get(attr) is not None: - parameters.append( - self._create_parameter( - category="constraints", - subcategory=tag, - id=element.get("id", ""), - name=attr, - value=element.get(attr), - expected_value=expected, - ) - ) - else: - continue - else: - continue - return parameters - - def parse_ha_cluster_config(self): - """ - Parse HA cluster configuration XML and return a list of properties. - """ - parameters = [] - - for scope in [ - "rsc_defaults", - "crm_config", - "op_defaults", - "constraints", - "resources", - ]: - self.category = scope - root = self.parse_xml_output(self.execute_command_subprocess(CIB_ADMIN(scope=scope))) - if not root: - continue - - if self.category in self.BASIC_CATEGORIES: - try: - xpath = self.BASIC_CATEGORIES[self.category][0] - for element in root.findall(xpath): - parameters.extend(self._parse_basic_config(element, self.category)) - except Exception as ex: - self.result[ - "message" - ] += f"Failed to get {self.category} configuration: {str(ex)}" - continue - - elif self.category == "resources": - try: - for sub_category, xpath in self.RESOURCE_CATEGORIES.items(): - elements = root.findall(xpath) - for element in elements: - parameters.extend(self._parse_resource(element, sub_category)) - - for group in root.findall(".//group"): - group_id = group.get("id", "") - if "ASCS" in group_id: - for element in group.findall(".//primitive[@type='SAPInstance']"): - parameters.extend(self._parse_resource(element, "ascs")) - elif "ERS" in group_id: - for element in group.findall(".//primitive[@type='SAPInstance']"): - parameters.extend(self._parse_resource(element, "ers")) - - except Exception as ex: - self.result[ - "message" - ] += f"Failed to get resources configuration for {self.category}: {str(ex)}" - continue - - elif self.category == "constraints": - try: - parameters.extend(self._parse_constraints(root)) - except Exception as e: - self.result["message"] += f"Failed to get constraints configuration: {str(e)}" - continue - - try: - parameters.extend(self._parse_os_parameters()) - except Exception as ex: - self.result["message"] += f"Failed to get OS parameters: {str(ex)} \n" - - failed_parameters = [ - param - for param in parameters - if param.get("status", TestStatus.ERROR.value) == TestStatus.ERROR.value - ] - self.result.update( - { - "details": {"parameters": parameters}, - "status": ( - TestStatus.ERROR.value if failed_parameters else TestStatus.SUCCESS.value - ), - } - ) - self.result["message"] += "HA Parameter Validation completed successfully." - def main() -> None: """ Main entry point for the Ansible module. 
""" - module = AnsibleModule( - argument_spec=dict( - sid=dict(type="str"), - ascs_instance_number=dict(type="str"), - ers_instance_number=dict(type="str"), - virtual_machine_name=dict(type="str"), - pcmk_constants=dict(type="dict"), - fencing_mechanism=dict(type="str"), - nfs_provider=dict(type="str", default=""), - filter=dict(type="str", required=False, default="os_family"), + try: + module = AnsibleModule( + argument_spec=dict( + sid=dict(type="str"), + ascs_instance_number=dict(type="str"), + ers_instance_number=dict(type="str"), + virtual_machine_name=dict(type="str"), + pcmk_constants=dict(type="dict"), + fencing_mechanism=dict(type="str"), + nfs_provider=dict(type="str", default=""), + cib_output=dict(type="str", required=False, default=""), + os_family=dict(type="str", required=False), + filter=dict(type="str", required=False, default="os_family"), + ) ) - ) + os_family = module.params.get("os_family") or ansible_facts(module).get( + "os_family", "UNKNOWN" + ) + except Exception: + module = AnsibleModule( + argument_spec=dict( + sid=dict(type="str"), + ascs_instance_number=dict(type="str"), + ers_instance_number=dict(type="str"), + virtual_machine_name=dict(type="str"), + pcmk_constants=dict(type="dict"), + fencing_mechanism=dict(type="str"), + nfs_provider=dict(type="str", default=""), + cib_output=dict(type="str", required=False, default=""), + os_family=dict(type="str", required=False, default="UNKNOWN"), + ) + ) + os_family = module.params.get("os_family", "UNKNOWN").upper() validator = HAClusterValidator( sid=module.params["sid"], scs_instance_number=module.params["ascs_instance_number"], ers_instance_number=module.params["ers_instance_number"], - os_type=OperatingSystemFamily( - str(ansible_facts(module).get("os_family", "UNKNOWN")).upper() - ), + os_type=OperatingSystemFamily(os_family), virtual_machine_name=module.params["virtual_machine_name"], constants=module.params["pcmk_constants"], fencing_mechanism=module.params["fencing_mechanism"], nfs_provider=module.params.get("nfs_provider"), + cib_output=module.params.get("cib_output"), ) module.exit_json(**validator.get_result()) diff --git a/src/modules/render_html_report.py b/src/modules/render_html_report.py index dad51d73..fc8bcbef 100644 --- a/src/modules/render_html_report.py +++ b/src/modules/render_html_report.py @@ -14,9 +14,11 @@ from ansible.module_utils.basic import AnsibleModule try: - from ansible.module_utils.sap_automation_qa import SapAutomationQA, TestStatus + from ansible.module_utils.sap_automation_qa import SapAutomationQA + from ansible.module_utils.enums import TestStatus except ImportError: - from src.module_utils.sap_automation_qa import SapAutomationQA, TestStatus + from src.module_utils.sap_automation_qa import SapAutomationQA + from src.module_utils.enums import TestStatus DOCUMENTATION = r""" --- diff --git a/src/playbook_00_ha_db_functional_tests.yml b/src/playbook_00_ha_db_functional_tests.yml index 05e2fd2a..e8daa9ae 100644 --- a/src/playbook_00_ha_db_functional_tests.yml +++ b/src/playbook_00_ha_db_functional_tests.yml @@ -87,7 +87,7 @@ ansible.builtin.include_tasks: "./roles/misc/tasks/cluster-report.yml" when: test_group_name is defined - - name: "Run test cases by including them as roles" + - name: "Render HTML report for the test group logs" ansible.builtin.include_tasks: "./roles/misc/tasks/render-html-report.yml" when: test_group_name is defined diff --git a/src/playbook_00_ha_scs_functional_tests.yml b/src/playbook_00_ha_scs_functional_tests.yml index 0495a56c..aef99511 100644 --- 
a/src/playbook_00_ha_scs_functional_tests.yml
+++ b/src/playbook_00_ha_scs_functional_tests.yml
@@ -57,7 +57,7 @@
         group_name: "{{ sap_functional_test_type }}"
       when: test_group_name is defined

-    - name: "Run test cases by including them as roles"
+    - name: "Render HTML report for the test group logs"
       ansible.builtin.include_tasks: "./roles/misc/tasks/render-html-report.yml"
       when: test_group_name is defined

diff --git a/src/playbook_01_ha_offline_tests.yml b/src/playbook_01_ha_offline_tests.yml
new file mode 100644
index 00000000..f12040e6
--- /dev/null
+++ b/src/playbook_01_ha_offline_tests.yml
@@ -0,0 +1,58 @@
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+
+# /*---------------------------------------------------------------------------8
+# |                                                                            |
+# |                           HA Testing Framework                             |
+# |                                                                            |
+# +------------------------------------4--------------------------------------*/
+- hosts: localhost
+  gather_facts: false
+  name: "Setup deployer for HA Testing Framework"
+  vars_files: "./vars/input-api.yaml"
+  tasks:
+    - name: "Install required Python Azure packages"
+      ansible.builtin.pip:
+        name:
+          - ansible-runner
+          - azure-kusto-data
+          - azure-kusto-ingest
+          - azure-identity
+          - azure-mgmt-network
+          - pandas
+
+    - name: "Set the test group name based on the inputs"
+      ansible.builtin.set_fact:
+        test_group_name: "{{ sap_functional_test_type_map
+          | selectattr('name', 'equalto', sap_functional_test_type)
+          | map(attribute='value') | first }}{{ '_' + (platform | upper)
+          if sap_functional_test_type == 'DatabaseHighAvailability' else '' }}"
+      run_once: true
+
+    - name: Set the test group facts
+      ansible.builtin.set_fact:
+        test_group_start_time: "{{ now(utc=true,fmt='%Y-%m-%d %H:%M:%S') }}"
+        test_group_invocation_id: "{{ lookup('pipe', 'uuidgen') }}"
+        test_cases: "{{ test_groups
+          | selectattr('name', 'equalto', test_group_name)
+          | map(attribute='test_cases') | list | flatten(levels=1)
+          | selectattr('task_name', 'contains', 'offline') | list }}"
+      run_once: true
+
+    - name: "Run test cases by including them as roles"
+      ansible.builtin.include_tasks: "./roles/{{ test_group_name | lower }}/tasks/{{ item.task_name }}.yml"
+      loop: "{{ test_cases | list }}"
+      vars:
+        group_invocation_id: "{{ test_group_invocation_id }}"
+        group_start_time: "{{ test_group_start_time }}"
+        group_name: "{{ sap_functional_test_type }}"
+      when: test_group_name is defined
+
+    - name: "Render HTML report for the test group logs"
+      ansible.builtin.include_tasks: "./roles/misc/tasks/render-html-report.yml"
+      when: test_group_name is defined
+
+    - name: "Debug the group_invocation_id"
+      ansible.builtin.debug:
+        msg: "Group invocation ID: {{ test_group_invocation_id }}"
+      when: test_group_invocation_id is defined
diff --git a/src/roles/ha_db_hana/tasks/ha-config-offline.yml b/src/roles/ha_db_hana/tasks/ha-config-offline.yml
new file mode 100644
index 00000000..4093a480
--- /dev/null
+++ b/src/roles/ha_db_hana/tasks/ha-config-offline.yml
@@ -0,0 +1,46 @@
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
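+
+# Offline validation consumes pre-collected CIB dumps rather than querying a
+# live cluster. Expected layout (hostnames are illustrative and taken from the
+# inventory): <workspace>/offline_validation/<hostname>/cib, where each cib
+# file is a CIB XML dump captured on the node, for example with
+# `cibadmin --query > cib`.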
+ +# /*--------------------------------------------------------------------------- +# | HA Configuration Validation | +# +--------------------------------------------------------------------------*/ + +- name: "Find all CIB files in offline_validation directory" + ansible.builtin.find: + paths: "{{ _workspace_directory }}/offline_validation/" + patterns: "cib" + recurse: true + register: cib_files + delegate_to: localhost + +- name: "Get DB hostnames from inventory" + ansible.builtin.set_fact: + db_hostnames: "{{ groups + | dict2items + | selectattr('key', 'match', '.*_DB$') + | map(attribute='value') + | flatten }}" + +- name: "Filter CIB files for DB hosts only" + ansible.builtin.set_fact: + db_cib_files: "{{ cib_files.files + | selectattr('path', 'match', '.*offline_validation/(' + (db_hostnames | join('|')) + ')/cib$') + | list }}" + when: cib_files.files | length > 0 and db_hostnames | length > 0 + +- name: "Set empty list if no DB hosts found" + ansible.builtin.set_fact: + db_cib_files: [] + when: db_hostnames | default([]) | length == 0 + +- name: "Debug message when no DB hosts found" + ansible.builtin.debug: + msg: "No hosts found for Database layer, validation not run" + when: db_cib_files | default([]) | length == 0 + +- name: "Process HA Configuration for each DB host" + ansible.builtin.include_tasks: "./roles/misc/tasks/offline-validation.yml" + loop: "{{ db_cib_files | default([]) }}" + loop_control: + loop_var: offline_validation_host + when: db_cib_files | default([]) | length > 0 diff --git a/src/roles/ha_db_hana/tasks/ha-config.yml b/src/roles/ha_db_hana/tasks/ha-config.yml index de3aac75..db411ea8 100644 --- a/src/roles/ha_db_hana/tasks/ha-config.yml +++ b/src/roles/ha_db_hana/tasks/ha-config.yml @@ -9,7 +9,15 @@ - name: "Ensure a list of package version is available for logging" no_log: true - ansible.builtin.package_facts: + block: + - name: "Try to collect package facts" + ansible.builtin.package_facts: + rescue: + - name: "Package facts collection failed - using empty list" + ansible.builtin.set_fact: + ansible_facts: + packages: {} + no_log: true - name: "Create package dictionary for telemetry data" become: true @@ -39,7 +47,6 @@ instance_number: "{{ db_instance_number }}" virtual_machine_name: "{{ azure_instance_metadata.json.compute.name }}" fencing_mechanism: "{{ database_cluster_type }}" - os_version: "{{ ansible_distribution_version }}" pcmk_constants: "{{ lookup('file', 'constants.yaml') | from_yaml }}" saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" register: test_result diff --git a/src/roles/ha_scs/tasks/ha-config-offline.yml b/src/roles/ha_scs/tasks/ha-config-offline.yml new file mode 100644 index 00000000..c0f68ce4 --- /dev/null +++ b/src/roles/ha_scs/tasks/ha-config-offline.yml @@ -0,0 +1,56 @@ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
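+
+# Mirrors ha_db_hana/tasks/ha-config-offline.yml for the SCS layer: candidate
+# hosts come from inventory groups matching .*_SCS$ and .*_ERS$, and only CIB
+# dumps found under offline_validation/<hostname>/cib for those hosts are used.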
+ +# /*--------------------------------------------------------------------------- +# | HA Configuration Validation | +# +--------------------------------------------------------------------------*/ + +- name: "Find all CIB files in offline_validation directory" + ansible.builtin.find: + paths: "{{ _workspace_directory }}/offline_validation/" + patterns: "cib" + recurse: true + register: cib_files + delegate_to: localhost + +- name: "Get SCS and ERS hostnames from inventory" + ansible.builtin.set_fact: + scs_hostnames: "{{ groups + | dict2items + | selectattr('key', 'match', '.*_SCS$') + | map(attribute='value') + | flatten }}" + + ers_hostnames: "{{ groups + | dict2items + | selectattr('key', 'match', '.*_ERS$') + | map(attribute='value') + | flatten }}" + +- name: "Combine SCS and ERS hostnames" + ansible.builtin.set_fact: + scs_ers_hostnames: "{{ (scs_hostnames | default([])) + (ers_hostnames | default([])) }}" + +- name: "Filter CIB files for SCS and ERS hosts only" + ansible.builtin.set_fact: + scs_ers_cib_files: "{{ cib_files.files + | selectattr('path', 'match', '.*offline_validation/(' + (scs_ers_hostnames | join('|')) + ')/cib$') + | list }}" + when: cib_files.files | length > 0 and scs_ers_hostnames | length > 0 + +- name: "Set empty list if no SCS and ERS hosts found" + ansible.builtin.set_fact: + scs_ers_cib_files: [] + when: scs_ers_hostnames | default([]) | length == 0 + +- name: "Debug message when no SCS and ERS hosts found" + ansible.builtin.debug: + msg: "No hosts found for SCS and ERS layers, validation not run" + when: scs_ers_cib_files | default([]) | length == 0 + +- name: "Process HA Configuration for each SCS and ERS host" + ansible.builtin.include_tasks: "./roles/misc/tasks/offline-validation.yml" + loop: "{{ scs_ers_cib_files | default([]) }}" + loop_control: + loop_var: offline_validation_host + when: scs_ers_cib_files | default([]) | length > 0 diff --git a/src/roles/misc/tasks/offline-validation.yml b/src/roles/misc/tasks/offline-validation.yml new file mode 100644 index 00000000..942cd147 --- /dev/null +++ b/src/roles/misc/tasks/offline-validation.yml @@ -0,0 +1,84 @@ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
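+
+# Per-host flow (sketch of what the tasks below do): derive the hostname from
+# the CIB file path, slurp and base64-decode the dump, then hand it to
+# get_pcmk_properties_db or get_pcmk_properties_scs via cib_output so the
+# validation runs against the file instead of a live cibadmin query; results
+# (or the rescue path's FAILED record) are posted to telemetry.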
+ +# /*--------------------------------------------------------------------------- +# | Process HA Configuration for Single Host | +# +--------------------------------------------------------------------------*/ + +- name: "Extract hostname from CIB file path" + ansible.builtin.set_fact: + current_hostname: "{{ offline_validation_host.path | regex_replace('.*/offline_validation/([^/]+)/cib$', '\\1') }}" + +- name: "Test Setup Tasks for {{ current_hostname }}" + ansible.builtin.include_tasks: "roles/misc/tasks/test-case-setup.yml" + +- name: "Read CIB content for {{ current_hostname }}" + ansible.builtin.slurp: + src: "{{ offline_validation_host.path }}" + register: cib_file_content + delegate_to: localhost + +- name: "HA Configuration check for {{ current_hostname }}" + block: + - name: "Offline HA Configuration validation for DB" + when: sap_functional_test_type == "DatabaseHighAvailability" + block: + - name: "Get Pacemaker properties for DB" + get_pcmk_properties_db: + sid: "{{ db_sid | upper }}" + instance_number: "{{ db_instance_number }}" + virtual_machine_name: "{{ current_hostname }}" + fencing_mechanism: "{{ database_cluster_type }}" + pcmk_constants: "{{ lookup('file', '../../ha_db_hana/tasks/files/constants.yaml') | from_yaml }}" + saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" + os_family: "{{ target_os_family | default('UNKNOWN') }}" + cib_output: "{{ cib_file_content.content | b64decode if cib_file_content.content is defined else '' }}" + register: test_result + + - name: "Set the test case status to PASSED for {{ current_hostname }}" + ansible.builtin.set_fact: + test_case_name: "HA Parameters Validation: {{ current_hostname }}" + test_case_status: "{{ test_result.status }}" + test_case_message: "{{ test_result.message }}" + test_case_details: "{{ test_result.details }}" + test_case_hostname: "{{ current_hostname }}" + + - name: "Offline HA Configuration validation for SCS" + when: sap_functional_test_type == "CentralServicesHighAvailability" + block: + - name: "Get Pacemaker properties for SCS" + get_pcmk_properties_scs: + sid: "{{ sap_sid | upper }}" + ascs_instance_number: "{{ scs_instance_number }}" + ers_instance_number: "{{ ers_instance_number }}" + virtual_machine_name: "{{ current_hostname }}" + pcmk_constants: "{{ lookup('file', '../../ha_scs/tasks/files/constants.yaml') | from_yaml }}" + fencing_mechanism: "{{ scs_cluster_type }}" + nfs_provider: "{{ NFS_provider }}" + os_family: "{{ target_os_family | default('UNKNOWN') }}" + cib_output: "{{ cib_file_content.content | b64decode if cib_file_content.content is defined else '' }}" + register: test_result + + - name: "Set the test case status to PASSED for {{ current_hostname }}" + ansible.builtin.set_fact: + test_case_name: "HA Parameters Validation: {{ current_hostname }}" + test_case_status: "{{ test_result.status }}" + test_case_message: "{{ test_result.message }}" + test_case_details: "{{ test_result.details }}" + test_case_hostname: "{{ current_hostname }}" + + - name: "Post Telemetry Data for {{ current_hostname }}" + ansible.builtin.include_tasks: "roles/misc/tasks/post-telemetry-data.yml" + + rescue: + - name: "Test case failed for {{ current_hostname }}" + ansible.builtin.set_fact: + test_case_name: "HA Parameters Validation: {{ current_hostname }}" + test_case_status: "FAILED" + test_case_details: "{{ test_result | default('Test execution failed') }}" + test_case_message: "{{ ansible_failed_result.msg | default('Unknown error occurred') }}" + test_case_hostname: "{{ current_hostname 
}}" + package_versions: "{{ packages_list.details | default({}) }}" + + - name: "Post Telemetry Data for failed {{ current_hostname }}" + ansible.builtin.include_tasks: "roles/misc/tasks/post-telemetry-data.yml" diff --git a/src/roles/misc/tasks/post-telemetry-data.yml b/src/roles/misc/tasks/post-telemetry-data.yml index 3e391682..037530d7 100644 --- a/src/roles/misc/tasks/post-telemetry-data.yml +++ b/src/roles/misc/tasks/post-telemetry-data.yml @@ -26,7 +26,7 @@ "TestGroupInvocationId": "{{ group_invocation_id | default('') }}", "TestGroupStartTime": "{{ group_start_time | default('') }}", "TestGroupName": "{{ group_name | default('') }}", - "OsVersion": "{{ ansible_distribution }} {{ ansible_distribution_version }}", + "OsVersion": "{{ target_os_family | default(ansible_distribution | default('') ~ ' ' ~ ansible_distribution_version | default('')) }}", "TestCaseMessage": "{{ test_case_message | default('') }}", "TestCaseDetails": "{{ test_case_details | default('') }}", "DurationSeconds": "{{ diff --git a/src/roles/misc/tasks/render-html-report.yml b/src/roles/misc/tasks/render-html-report.yml index 148f8b82..a54cb2d6 100644 --- a/src/roles/misc/tasks/render-html-report.yml +++ b/src/roles/misc/tasks/render-html-report.yml @@ -16,7 +16,7 @@ - name: "Read the log file and create a HTML report" render_html_report: test_group_invocation_id: "{{ test_group_invocation_id }}" - test_group_name: "{{ test_group_name }}_{{ ansible_os_family | upper }}" + test_group_name: "{{ test_group_name }}_{{ target_os_family | default(ansible_os_family | default('SUSE')) | upper }}" report_template: "{{ html_report_template }}" workspace_directory: "{{ _workspace_directory }}" register: report_file_path diff --git a/src/vars/input-api.yaml b/src/vars/input-api.yaml index 5867ebf9..41e0d6b0 100644 --- a/src/vars/input-api.yaml +++ b/src/vars/input-api.yaml @@ -22,6 +22,15 @@ test_groups: and HANA system replication setup. enabled: true + - name: HA Parameters Validation Offline + task_name: ha-config-offline + description: | + The HA parameter validation test validates HA configuration, + including Corosync settings, Pacemaker resources, SBD device configuration, + and HANA system replication setup. This test is run in an offline mode where the CIB files are + already available in the offline_validation directory. + enabled: false + - name: Azure Load Balancer Validation task_name: azure-lb description: | @@ -140,6 +149,15 @@ test_groups: replication setup. enabled: true + - name: "HA Parameters Validation Offline" + task_name: ha-config-offline + description: | + The HA parameter validation test validates HA configuration + including Corosync settings, Pacemaker resources, SBD device configuration, and SCS system + replication setup. This test is run in an offline mode where the CIB files are + already available in the offline_validation directory. 
+    enabled: false
+
   - name: Azure Load Balancer Validation
     task_name: azure-lb
     description: |
diff --git a/tests/module_utils/get_cluster_status_test.py b/tests/module_utils/get_cluster_status_test.py
index e601ddfa..8b7e0823 100644
--- a/tests/module_utils/get_cluster_status_test.py
+++ b/tests/module_utils/get_cluster_status_test.py
@@ -245,7 +245,6 @@
             "active",
         ]

-        # Set the test ready flag to True
         base_checker.test_ready = True
         base_checker.test_stable = True

@@ -266,7 +265,7 @@
         mocker.patch.object(base_checker, "execute_command_subprocess", return_value="reboot")

         base_checker.test_ready = True
-        base_checker.test_stable = False  # Cluster is not stable
+        base_checker.test_stable = False

         result = base_checker.run()

diff --git a/tests/module_utils/get_pcmk_properties_test.py b/tests/module_utils/get_pcmk_properties_test.py
new file mode 100644
index 00000000..ea96dcbe
--- /dev/null
+++ b/tests/module_utils/get_pcmk_properties_test.py
@@ -0,0 +1,464 @@
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+
+"""
+Unit tests for the get_pcmk_properties module.
+"""
+
+import io
+import xml.etree.ElementTree as ET
+import pytest
+from src.module_utils.get_pcmk_properties import BaseHAClusterValidator
+from src.module_utils.enums import OperatingSystemFamily, TestStatus
+
+DUMMY_XML_RSC = """
+<rsc_defaults>
+    <meta_attributes id="build-resource-defaults">
+        <nvpair id="build-resource-stickiness" name="resource-stickiness" value="1000"/>
+        <nvpair id="build-migration-threshold" name="migration-threshold" value="5000"/>
+    </meta_attributes>
+</rsc_defaults>
+"""
+
+DUMMY_XML_OP = """
+<op_defaults>
+    <meta_attributes id="op-options">
+        <nvpair id="op-options-timeout" name="timeout" value="600"/>
+        <nvpair id="op-options-record-pending" name="record-pending" value="true"/>
+    </meta_attributes>
+</op_defaults>
+"""
+
+DUMMY_XML_CRM = """
+<crm_config>
+    <cluster_property_set id="cib-bootstrap-options">
+        <nvpair id="cib-bootstrap-options-stonith-enabled" name="stonith-enabled" value="true"/>
+        <nvpair id="cib-bootstrap-options-maintenance-mode" name="maintenance-mode" value="false"/>
+    </cluster_property_set>
+</crm_config>
+"""
+
+DUMMY_XML_CONSTRAINTS = """
+<constraints>
+    <rsc_location id="loc_azure_events" rsc="health-azure-events" score="INFINITY"/>
+    <rsc_colocation id="col_saphana_ip" score="4000" rsc="g_ip_HDB_HDB00" with-rsc="msl_SAPHana_HDB_HDB00"/>
+    <rsc_order id="ord_saphana" kind="Optional" first="cln_SAPHanaTopology_HDB_HDB00" then="msl_SAPHana_HDB_HDB00"/>
+</constraints>
+"""
+
+DUMMY_XML_RESOURCES = """
+<resources>
+    <primitive id="stonith-sbd" class="stonith" type="external/sbd">
+        <meta_attributes id="stonith-sbd-meta_attributes">
+            <nvpair id="stonith-sbd-pcmk_delay_max" name="pcmk_delay_max" value="30"/>
+            <nvpair id="stonith-sbd-target-role" name="target-role" value="Started"/>
+        </meta_attributes>
+        <operations>
+            <op id="stonith-sbd-monitor" name="monitor" timeout="30" interval="10"/>
+            <op id="stonith-sbd-start" name="start" timeout="20" interval="0"/>
+        </operations>
+    </primitive>
+    <primitive id="rsc_st_azure" class="stonith" type="fence_azure_arm">
+        <meta_attributes id="rsc_st_azure-meta_attributes">
+            <nvpair id="rsc_st_azure-pcmk_delay_max" name="pcmk_delay_max" value="15"/>
+            <nvpair id="rsc_st_azure-target-role" name="target-role" value="Started"/>
+        </meta_attributes>
+        <instance_attributes id="rsc_st_azure-instance_attributes">
+            <nvpair id="rsc_st_azure-login" name="login" value="testuser"/>
+            <nvpair id="rsc_st_azure-passwd" name="passwd" value="secret"/>
+        </instance_attributes>
+        <operations>
+            <op id="rsc_st_azure-monitor" name="monitor" timeout="700" interval="10"/>
+            <op id="rsc_st_azure-start" name="start" timeout="20" interval="0"/>
+        </operations>
+    </primitive>
+    <primitive id="rsc_SAPHanaTopology_HDB_HDB00" class="ocf" provider="suse" type="SAPHanaTopology">
+        <meta_attributes id="topology-meta_attributes">
+            <nvpair id="topology-clone-max" name="clone-max" value="2"/>
+        </meta_attributes>
+        <instance_attributes id="topology-instance_attributes">
+            <nvpair id="topology-SID" name="SID" value="HDB"/>
+        </instance_attributes>
+        <operations>
+            <op id="topology-monitor" name="monitor" timeout="600" interval="10"/>
+        </operations>
+    </primitive>
+</resources>
+"""
+
+DUMMY_XML_FULL_CIB = f"""
+<cib>
+  <configuration>
+    {DUMMY_XML_CRM}
+    {DUMMY_XML_RSC}
+    {DUMMY_XML_OP}
+    {DUMMY_XML_CONSTRAINTS}
+    {DUMMY_XML_RESOURCES}
+  </configuration>
+</cib>
+"""
+
+DUMMY_OS_COMMAND = """kernel.numa_balancing = 0"""
+
+DUMMY_CONSTANTS = {
+    "VALID_CONFIGS": {
+        "REDHAT": {"stonith-enabled": "true", "cluster-name": "hdb_HDB"},
+        "azure-fence-agent": {"priority": "10"},
+        "sbd": {"pcmk_delay_max": "30"},
+    },
+    "RSC_DEFAULTS": {
+        "resource-stickiness": "1000",
+        "migration-threshold": "5000",
+    },
+    "OP_DEFAULTS": {
+        "timeout": "600",
+        "record-pending": "true",
+    },
+    "CRM_CONFIG_DEFAULTS": {
+        "stonith-enabled": "true",
+        "maintenance-mode": "false",
+    },
+    "RESOURCE_DEFAULTS": {
+        "REDHAT": {
+            "fence_agent": {
+                "meta_attributes": {"pcmk_delay_max": "15", "target-role": "Started"},
+                "operations": {
+                    "monitor": {"timeout": ["700", "700s"], "interval": "10"},
+                    "start": {"timeout": "20"},
+                },
+                "instance_attributes": {"login": "testuser"},
+            },
+            "sbd_stonith": {
+                "meta_attributes": {"pcmk_delay_max": "30", "target-role": "Started"},
+                "operations": {
+                    "monitor": {"timeout": ["30", "30s"], "interval": "10"},
+                    "start": {"timeout": "20"},
+                },
+            },
+            "test_resource": {
+                "meta_attributes": {"clone-max": "2"},
+                "operations": {"monitor": {"timeout": ["600", "600s"]}},
+                "instance_attributes": {"SID": "HDB"},
+            },
+        }
+    },
+    "OS_PARAMETERS": {
+        "DEFAULTS": {"sysctl": {"kernel.numa_balancing": "kernel.numa_balancing = 0"}}
+    },
+    "CONSTRAINTS": {
+        "rsc_location": {"score": "INFINITY"},
+        "rsc_colocation": {"score": "4000"},
+        "rsc_order": {"kind": "Optional"},
+    },
+}
+
+
+def fake_open_factory(file_content):
+    """Factory function to create a fake open function."""
+
+    def fake_open(*args, **kwargs):
+        return io.StringIO(file_content)
+
+    return fake_open
+
+
+class 
TestableBaseHAClusterValidator(BaseHAClusterValidator): + """ + Testable implementation of BaseHAClusterValidator for testing purposes. + """ + + RESOURCE_CATEGORIES = { + "sbd_stonith": ".//primitive[@type='external/sbd']", + "fence_agent": ".//primitive[@type='fence_azure_arm']", + "test_resource": ".//primitive[@id='rsc_SAPHanaTopology_HDB_HDB00']", + } + + def _get_additional_parameters(self): + """ + Mock implementation of additional parameters. + """ + return [ + self._create_parameter( + category="additional", + name="test_param", + value="test_value", + expected_value="test_value", + ) + ] + + +class TestBaseHAClusterValidator: + """ + Test cases for the BaseHAClusterValidator class. + """ + + @pytest.fixture + def mock_xml_outputs(self): + """ + Fixture for providing mock XML outputs. + """ + return { + "rsc_defaults": DUMMY_XML_RSC, + "crm_config": DUMMY_XML_CRM, + "op_defaults": DUMMY_XML_OP, + "constraints": DUMMY_XML_CONSTRAINTS, + "resources": DUMMY_XML_RESOURCES, + } + + @pytest.fixture + def validator(self, monkeypatch, mock_xml_outputs): + """ + Fixture for creating a TestableBaseHAClusterValidator instance. + """ + + def mock_execute_command(*args, **kwargs): + """ + Mock function to replace execute_command_subprocess. + """ + command = args[0] if args else kwargs.get("command", []) + command_str = " ".join(command) if isinstance(command, list) else str(command) + if "sysctl" in command_str: + return DUMMY_OS_COMMAND + if len(command) >= 2 and command[-1] in mock_xml_outputs: + return mock_xml_outputs[command[-1]] + return "" + + monkeypatch.setattr( + "src.module_utils.sap_automation_qa.SapAutomationQA.execute_command_subprocess", + mock_execute_command, + ) + + return TestableBaseHAClusterValidator( + os_type=OperatingSystemFamily.REDHAT, + sid="HDB", + virtual_machine_name="vmname", + constants=DUMMY_CONSTANTS, + fencing_mechanism="sbd", + cib_output="", + ) + + @pytest.fixture + def validator_with_cib(self): + """ + Fixture for creating a validator with CIB output. + """ + return TestableBaseHAClusterValidator( + os_type=OperatingSystemFamily.REDHAT, + sid="HDB", + virtual_machine_name="vmname", + constants=DUMMY_CONSTANTS, + fencing_mechanism="sbd", + cib_output=DUMMY_XML_FULL_CIB, + ) + + def test_init(self, validator): + """ + Test the __init__ method. + """ + assert validator.os_type == "REDHAT" + assert validator.sid == "HDB" + assert validator.virtual_machine_name == "vmname" + assert validator.fencing_mechanism == "sbd" + assert validator.constants == DUMMY_CONSTANTS + assert validator.cib_output == "" + + def test_get_expected_value_fence_config(self, validator): + """ + Test _get_expected_value method with fence configuration. + """ + validator.fencing_mechanism = "azure-fence-agent" + expected = validator._get_expected_value("crm_config", "priority") + assert expected == "10" + + def test_get_resource_expected_value_instance_attributes(self, validator): + """ + Test _get_resource_expected_value method for instance_attributes section. + """ + expected = validator._get_resource_expected_value( + "fence_agent", "instance_attributes", "login" + ) + assert expected == "testuser" + + def test_get_resource_expected_value_invalid_section(self, validator): + """ + Test _get_resource_expected_value method for invalid section. 
+        """
+        expected = validator._get_resource_expected_value("fence_agent", "invalid_section", "param")
+        assert expected is None
+
+    def test_create_parameter_with_expected_value(self, validator):
+        """
+        Test _create_parameter method with provided expected value.
+        """
+        param = validator._create_parameter(
+            category="test",
+            name="test_param",
+            value="test_value",
+            expected_value="test_value",
+            id="test_id",
+        )
+        assert param["category"] == "test"
+        assert param["name"] == "test_param"
+        assert param["value"] == "test_value"
+        assert param["expected_value"] == "test_value"
+        assert param["status"] == TestStatus.SUCCESS.value
+        assert param["id"] == "test_id"
+
+    def test_create_parameter_with_subcategory(self, validator):
+        """
+        Test _create_parameter method with subcategory.
+        """
+        param = validator._create_parameter(
+            category="test",
+            subcategory="sub",
+            name="test_param",
+            value="test_value",
+            expected_value="test_value",
+        )
+        assert param["category"] == "test_sub"
+
+    def test_determine_parameter_status_success_string(self, validator):
+        """
+        Test _determine_parameter_status method with matching string values.
+        """
+        status = validator._determine_parameter_status("true", "true")
+        assert status == TestStatus.SUCCESS.value
+
+    def test_determine_parameter_status_error_string(self, validator):
+        """
+        Test _determine_parameter_status method with non-matching string values.
+        """
+        status = validator._determine_parameter_status("true", "false")
+        assert status == TestStatus.ERROR.value
+
+    def test_parse_basic_config(self, validator):
+        """
+        Test _parse_basic_config method.
+        """
+        xml_str = """
+            <cluster_property_set id="test_properties">
+                <nvpair id="nv1" name="stonith-enabled" value="true"/>
+                <nvpair id="nv2" name="maintenance-mode" value="false"/>
+            </cluster_property_set>
+        """
+        params = validator._parse_basic_config(
+            ET.fromstring(xml_str), "crm_config", "test_subcategory"
+        )
+        assert len(params) == 2
+        assert params[0]["category"] == "crm_config_test_subcategory"
+
+    def test_parse_resource_with_operations(self, validator):
+        """
+        Test _parse_resource method with operations.
+        """
+        xml_str = """
+            <primitive id="rsc_test" type="Test">
+                <operations>
+                    <op id="op-monitor" name="monitor" timeout="600" interval="10"/>
+                    <op id="op-start" name="start" timeout="20" interval="0"/>
+                </operations>
+            </primitive>
+        """
+        params = validator._parse_resource(ET.fromstring(xml_str), "test_resource")
+        timeout_params = [p for p in params if p["name"].endswith("_timeout")]
+        interval_params = [p for p in params if p["name"].endswith("_interval")]
+        assert len(timeout_params) == 2
+        assert len(interval_params) == 2
+
+    def test_parse_constraints(self, validator):
+        """
+        Test _parse_constraints method.
+        """
+        xml_str = """
+            <constraints>
+                <rsc_location id="loc1" score="INFINITY"/>
+                <rsc_colocation id="col1" score="4000"/>
+                <rsc_order id="ord1" kind="Optional"/>
+            </constraints>
+        """
+        root = ET.fromstring(xml_str)
+        params = validator._parse_constraints(root)
+        location_params = [p for p in params if "rsc_location" in p["category"]]
+        colocation_params = [p for p in params if "rsc_colocation" in p["category"]]
+        order_params = [p for p in params if "rsc_order" in p["category"]]
+        assert len(location_params) >= 1
+        assert len(colocation_params) >= 1
+        assert len(order_params) >= 1
+
+    def test_parse_resources_section(self, validator):
+        """
+        Test _parse_resources_section method.
+        """
+        xml_str = DUMMY_XML_RESOURCES
+        root = ET.fromstring(xml_str)
+        params = validator._parse_resources_section(root)
+        assert len(params) > 0
+
+    def test_should_skip_scope_redhat_op_defaults(self, validator):
+        """
+        Test _should_skip_scope method for REDHAT op_defaults.
+        """
+        assert validator._should_skip_scope("op_defaults")
+
+    def test_should_skip_scope_non_redhat_op_defaults(self):
+        """
+        Test _should_skip_scope method for non-REDHAT op_defaults.
+ """ + validator = TestableBaseHAClusterValidator( + os_type=OperatingSystemFamily.SUSE, + sid="HDB", + virtual_machine_name="vmname", + constants=DUMMY_CONSTANTS, + fencing_mechanism="sbd", + cib_output="", + ) + assert not validator._should_skip_scope("op_defaults") + + def test_get_scope_from_cib_with_cib_output(self, validator_with_cib): + """ + Test _get_scope_from_cib method with CIB output. + """ + scope_element = validator_with_cib._get_scope_from_cib("resources") + assert scope_element is not None + assert scope_element.tag == "resources" + + def test_get_scope_from_cib_without_cib_output(self, validator): + """ + Test _get_scope_from_cib method without CIB output. + """ + scope_element = validator._get_scope_from_cib("resources") + assert scope_element is None + + def test_parse_ha_cluster_config_with_cib(self, validator_with_cib): + """ + Test parse_ha_cluster_config method with CIB output. + """ + validator_with_cib.parse_ha_cluster_config() + result = validator_with_cib.get_result() + assert result["status"] in [TestStatus.SUCCESS.value, TestStatus.ERROR.value] + assert "parameters" in result["details"] + + def test_get_expected_value_for_category_resource(self, validator): + """ + Test _get_expected_value_for_category method for resource category. + """ + expected = validator._get_expected_value_for_category( + "fence_agent", "meta_attributes", "pcmk_delay_max", None + ) + assert expected == "15" + + def test_get_expected_value_for_category_basic(self, validator): + """ + Test _get_expected_value_for_category method for basic category. + """ + expected = validator._get_expected_value_for_category( + "crm_config", None, "stonith-enabled", None + ) + assert expected == "true" + + def test_determine_parameter_status_error_invalid_expected(self, validator): + """ + Test _determine_parameter_status method with invalid expected value type. + """ + status = validator._determine_parameter_status("value", {"invalid": "dict"}) + assert status == TestStatus.ERROR.value + + def test_parse_constraints_skip_missing_attributes(self, validator): + """ + Test _parse_constraints method skips elements with missing attributes. + """ + xml_str = """ + + """ + root = ET.fromstring(xml_str) + params = validator._parse_constraints(root) + score_params = [p for p in params if p["name"] == "score"] + assert len(score_params) == 0 + + def test_get_scope_from_cib_invalid_scope(self, validator_with_cib): + """ + Test _get_scope_from_cib method with invalid scope. + """ + scope_element = validator_with_cib._get_scope_from_cib("invalid_scope") + assert scope_element is None diff --git a/tests/modules/get_pcmk_properties_db_test.py b/tests/modules/get_pcmk_properties_db_test.py index 9d7d5664..135b7b7b 100644 --- a/tests/modules/get_pcmk_properties_db_test.py +++ b/tests/modules/get_pcmk_properties_db_test.py @@ -5,6 +5,7 @@ Unit tests for the get_pcmk_properties_db module. 
""" +import builtins import io import xml.etree.ElementTree as ET import pytest @@ -31,9 +32,6 @@ - - - """ DUMMY_XML_CONSTRAINTS = """ @@ -48,6 +46,13 @@ + + + + + + + @@ -76,22 +81,67 @@ - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + """ +DUMMY_XML_FULL_CIB = f""" + + + {DUMMY_XML_CRM} + {DUMMY_XML_RSC} + {DUMMY_XML_OP} + {DUMMY_XML_CONSTRAINTS} + {DUMMY_XML_RESOURCES} + +""" + DUMMY_OS_COMMAND = """kernel.numa_balancing = 0""" -DUMMY_GLOBAL_INI = """[DEFAULT] -dumm1 = dummy2 +DUMMY_GLOBAL_INI_SAPHANASR = """[DEFAULT] +dummy1 = dummy2 + +[ha_dr_provider_SAPHanaSR] +provider = SAPHanaSR +path = /usr/share/SAPHanaSR +execution_order = 1 +""" + +DUMMY_GLOBAL_INI_ANGI = """[DEFAULT] +dummy1 = dummy2 [ha_dr_provider_sushanasr] provider = SAPHanaSR-angi +path = /usr/share/SAPHanaSR-angi +execution_order = 1 """ DUMMY_CONSTANTS = { @@ -100,16 +150,12 @@ "azure-fence-agent": {"priority": "10"}, }, "RSC_DEFAULTS": { - "REDHAT": { - "resource-stickiness": "1000", - "migration-threshold": "5000", - } + "resource-stickiness": "1000", + "migration-threshold": "5000", }, "OP_DEFAULTS": { - "REDHAT": { - "timeout": "600", - "record-pending": "true", - } + "timeout": "600", + "record-pending": "true", }, "CRM_CONFIG_DEFAULTS": {"stonith-enabled": "true"}, "RESOURCE_DEFAULTS": { @@ -128,33 +174,63 @@ "OS_PARAMETERS": { "DEFAULTS": {"sysctl": {"kernel.numa_balancing": "kernel.numa_balancing = 0"}} }, - "GLOBAL_INI": {"REDHAT": {"provider": "SAPHanaSR"}, "SUSE": {"provider": "SAPHanaSR-angi"}}, + "GLOBAL_INI": { + "REDHAT": { + "SAPHanaSR": { + "provider": "SAPHanaSR", + "path": "/usr/share/SAPHanaSR", + "execution_order": ["1", "2"], + } + }, + "SUSE": { + "SAPHanaSR-angi": {"provider": "SAPHanaSR-angi", "path": "/usr/share/SAPHanaSR-angi"} + }, + }, "CONSTRAINTS": {"rsc_location": {"score": "INFINITY"}}, } -def fake_open_factory(file_content): +class MockExecuteCommand: """ - Factory function to create a fake open function that returns a StringIO object. + Mock class for execute_command_subprocess. + """ + + def __init__(self, mock_outputs): + self.mock_outputs = mock_outputs + + def __call__(self, command, shell_command=False): + command_str = " ".join(command) if isinstance(command, list) else str(command) + if "sysctl" in command_str: + return DUMMY_OS_COMMAND + if len(command) >= 2 and command[-1] in self.mock_outputs: + return self.mock_outputs[command[-1]] + return "" - :param file_content: Content to be returned by the fake open function. - :type file_content: str - :return: Fake open function. - :rtype: function + +class MockOpen: + """ + Mock class for open function. """ - def fake_open(*args, **kwargs): - """ - Fake open function that returns a StringIO object. + def __init__(self, file_content): + self.file_content = file_content - :param *args: Positional arguments. - :param **kwargs: Keyword arguments. - :return: Instance of StringIO with file content. - :rtype: io.StringIO - """ - return io.StringIO("\n".join(file_content)) + def __call__(self, *args, **kwargs): + return io.StringIO(self.file_content) + + +class TestableHAClusterValidator(HAClusterValidator): + """ + Testable version of HAClusterValidator with mocked dependencies. 
+ """ + + def __init__(self, mock_execute_command, mock_open, *args, **kwargs): + self._mock_execute_command = mock_execute_command + self._mock_open = mock_open + super().__init__(*args, **kwargs) - return fake_open + def execute_command_subprocess(self, command, shell_command=False): + return self._mock_execute_command(command, shell_command) class TestHAClusterValidator: @@ -166,9 +242,6 @@ class TestHAClusterValidator: def mock_xml_outputs(self): """ Fixture for providing mock XML outputs. - - :return: Mock XML outputs. - :rtype: dict """ return { "rsc_defaults": DUMMY_XML_RSC, @@ -179,344 +252,334 @@ def mock_xml_outputs(self): } @pytest.fixture - def validator(self, monkeypatch, mock_xml_outputs): - """ - Fixture for creating a HAClusterValidator instance. - - :param monkeypatch: Monkeypatch fixture for mocking. - :type monkeypatch: pytest.MonkeyPatch - :param mock_xml_outputs: Mock XML outputs. - :type mock_xml_outputs: dict - :return: HAClusterValidator instance. - :rtype: HAClusterValidator - """ - - def mock_execute_command(*args, **kwargs): - """ - Mock function to replace execute_command_subprocess. - - :param *args: Positional arguments. - :param **kwargs: Keyword arguments. - :return: Mocked command output. - :rtype: str - """ - command = str(args[1]) if len(args) > 1 else str(kwargs.get("command")) - if "sysctl" in command: - return DUMMY_OS_COMMAND - return mock_xml_outputs.get(command[-1], "") - - monkeypatch.setattr( - "src.module_utils.sap_automation_qa.SapAutomationQA.execute_command_subprocess", - mock_execute_command, - ) - monkeypatch.setattr("builtins.open", fake_open_factory(DUMMY_GLOBAL_INI)) - return HAClusterValidator( - os_type=OperatingSystemFamily.REDHAT, - os_version="9.2", - sid="PRD", - instance_number="00", - fencing_mechanism="AFA", - virtual_machine_name="vmname", - constants=DUMMY_CONSTANTS, - saphanasr_provider=HanaSRProvider.SAPHANASR, - ) + def validator(self, mock_xml_outputs): + """ + Fixture for creating a TestableHAClusterValidator instance. + """ + mock_execute = MockExecuteCommand(mock_xml_outputs) + mock_open = MockOpen(DUMMY_GLOBAL_INI_SAPHANASR) + original_open = builtins.open + builtins.open = mock_open + try: + validator = TestableHAClusterValidator( + mock_execute, + mock_open, + os_type=OperatingSystemFamily.REDHAT, + sid="HDB", + instance_number="00", + fencing_mechanism="sbd", + virtual_machine_name="vmname", + constants=DUMMY_CONSTANTS, + saphanasr_provider=HanaSRProvider.SAPHANASR, + cib_output="", + ) + yield validator + finally: + builtins.open = original_open @pytest.fixture - def validator_angi(self, monkeypatch, mock_xml_outputs): - """ - Fixture for creating a HAClusterValidator instance. - - :param monkeypatch: Monkeypatch fixture for mocking. - :type monkeypatch: pytest.MonkeyPatch - :param mock_xml_outputs: Mock XML outputs. - :type mock_xml_outputs: dict - :return: HAClusterValidator instance. - :rtype: HAClusterValidator - """ - - def mock_execute_command(*args, **kwargs): - """ - Mock function to replace execute_command_subprocess. - - :param *args: Positional arguments. - :param **kwargs: Keyword arguments. - :return: Mocked command output. 
- :rtype: str - """ - command = str(args[1]) if len(args) > 1 else str(kwargs.get("command")) - if "sysctl" in command: - return DUMMY_OS_COMMAND - return mock_xml_outputs.get(command[-1], "") - - monkeypatch.setattr( - "src.module_utils.sap_automation_qa.SapAutomationQA.execute_command_subprocess", - mock_execute_command, - ) - monkeypatch.setattr("builtins.open", fake_open_factory(DUMMY_GLOBAL_INI)) + def validator_angi(self, mock_xml_outputs): + """ + Fixture for creating a TestableHAClusterValidator instance with ANGI provider. + """ + mock_execute = MockExecuteCommand(mock_xml_outputs) + mock_open = MockOpen(DUMMY_GLOBAL_INI_ANGI) + original_open = builtins.open + builtins.open = mock_open + try: + validator = TestableHAClusterValidator( + mock_execute, + mock_open, + os_type=OperatingSystemFamily.SUSE, + sid="HDB", + instance_number="00", + fencing_mechanism="sbd", + virtual_machine_name="vmname", + constants=DUMMY_CONSTANTS, + saphanasr_provider=HanaSRProvider.ANGI, + cib_output="", + ) + yield validator + finally: + builtins.open = original_open + + @pytest.fixture + def validator_with_cib(self): + """ + Fixture for creating a validator with CIB output. + """ return HAClusterValidator( - os_type=OperatingSystemFamily.SUSE, - os_version="9.2", - sid="PRD", + os_type=OperatingSystemFamily.REDHAT, + sid="HDB", instance_number="00", - fencing_mechanism="AFA", + fencing_mechanism="sbd", virtual_machine_name="vmname", constants=DUMMY_CONSTANTS, - saphanasr_provider=HanaSRProvider.ANGI, + saphanasr_provider=HanaSRProvider.SAPHANASR, + cib_output=DUMMY_XML_FULL_CIB, ) - def test_get_expected_value_fence_config(self, validator): + def test_init(self, validator): """ - Test _get_expected_value method with fence configuration. + Test the __init__ method. """ - validator.fencing_mechanism = "azure-fence-agent" - expected = validator._get_expected_value("crm_config", "priority") - assert expected == "10" + assert validator.os_type == "REDHAT" + assert validator.sid == "HDB" + assert validator.instance_number == "00" + assert validator.saphanasr_provider == HanaSRProvider.SAPHANASR - def test_get_expected_value_os_config(self, validator): + def test_parse_resources_section_saphanasr(self, validator): """ - Test _get_expected_value method with OS configuration. + Test _parse_resources_section method with SAPHanaSR provider. """ - expected = validator._get_expected_value("crm_config", "stonith-enabled") - assert expected == "true" + xml_str = DUMMY_XML_RESOURCES + root = ET.fromstring(xml_str) + params = validator._parse_resources_section(root) + assert len(params) > 0 + categories = [p.get("category", "") for p in params] + assert not any("angi_topology" in cat for cat in categories) - def test_get_expected_value_defaults(self, validator): + def test_parse_resources_section_angi(self, validator_angi): """ - Test _get_expected_value method with defaults. + Test _parse_resources_section method with ANGI provider. """ - expected = validator._get_expected_value("crm_config", "unknown-param") - assert expected is None + xml_str = DUMMY_XML_RESOURCES + root = ET.fromstring(xml_str) + params = validator_angi._parse_resources_section(root) + assert len(params) > 0 + categories = [p.get("category", "") for p in params] + assert not any(cat == "topology" for cat in categories) - def test_get_resource_expected_value_meta_attributes(self, validator): + def test_parse_global_ini_parameters_saphanasr(self, validator): """ - Test _get_resource_expected_value method for meta_attributes section. 
+ Test _parse_global_ini_parameters method with SAPHanaSR provider. """ - expected = validator._get_resource_expected_value( - "fence_agent", "meta_attributes", "pcmk_delay_max" - ) - assert expected == "15" + params = validator._parse_global_ini_parameters() + assert len(params) > 0 + provider_params = [p for p in params if p["name"] == "provider"] + assert len(provider_params) == 1 + assert provider_params[0]["value"] == "SAPHanaSR" - def test_get_resource_expected_value_operations(self, validator): + def test_parse_global_ini_parameters_angi(self, validator_angi): """ - Test _get_resource_expected_value method for operations section. + Test _parse_global_ini_parameters method with ANGI provider. """ - expected = validator._get_resource_expected_value( - "fence_agent", "operations", "timeout", "monitor" - ) - assert expected == ["700", "700s"] + params = validator_angi._parse_global_ini_parameters() + assert len(params) > 0 + provider_params = [p for p in params if p["name"] == "provider"] + assert len(provider_params) == 1 + assert provider_params[0]["value"] == "SAPHanaSR-angi" - def test_get_resource_expected_value_unknown_section(self, validator): + def test_parse_global_ini_parameters_with_list_expected_value(self, validator): """ - Test _get_resource_expected_value method for unknown section. + Test _parse_global_ini_parameters with list expected value matching. """ - expected = validator._get_resource_expected_value("fence_agent", "unknown_section", "param") - assert expected is None + params = validator._parse_global_ini_parameters() + execution_params = [p for p in params if p["name"] == "execution_order"] + if execution_params: + assert execution_params[0]["status"] in [ + TestStatus.SUCCESS.value, + TestStatus.INFO.value, + ] - def test_create_parameter_with_empty_value(self, validator): + def test_parse_global_ini_parameters_exception_handling(self, validator): """ - Test _create_parameter method when value is empty. + Test _parse_global_ini_parameters exception handling. """ - param = validator._create_parameter( - category="test_category", name="test_param", value="", expected_value="expected" - ) - assert param["status"] == TestStatus.INFO.value + original_open = builtins.open - def test_create_parameter_with_list_expected_value_success(self, validator): - """ - Test _create_parameter method with list expected value - success case. - """ - param = validator._create_parameter( - category="test_category", - name="test_param", - value="value1", - expected_value=["value1", "value2"], - ) - assert param["status"] == TestStatus.SUCCESS.value - assert param["expected_value"] == "value1" + def mock_open_error(*args, **kwargs): + raise FileNotFoundError("File not found") - def test_create_parameter_with_list_expected_value_error(self, validator): - """ - Test _create_parameter method with list expected value - error case. - """ - param = validator._create_parameter( - category="test_category", - name="test_param", - value="value3", - expected_value=["value1", "value2"], - ) - assert param["status"] == TestStatus.ERROR.value + builtins.open = mock_open_error + try: + params = validator._parse_global_ini_parameters() + assert len(params) == 0 + finally: + builtins.open = original_open - def test_create_parameter_with_string_expected_value_success(self, validator): + def test_get_additional_parameters(self, validator): """ - Test _create_parameter method with string expected value - success case. + Test _get_additional_parameters method. 
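+
+        For the DB validator these are expected to come from global.ini
+        (a sketch, assuming the method delegates to
+        _parse_global_ini_parameters):
+
+            names = [p["name"] for p in validator._get_additional_parameters()]
+            # expected to include "provider"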
""" - param = validator._create_parameter( - category="test_category", - name="test_param", - value="expected_value", - expected_value="expected_value", - ) - assert param["status"] == TestStatus.SUCCESS.value + params = validator._get_additional_parameters() + assert isinstance(params, list) + assert len(params) > 0 - def test_create_parameter_with_string_expected_value_error(self, validator): + def test_resource_categories_coverage(self, validator): """ - Test _create_parameter method with string expected value - error case. + Test all resource categories are parsed correctly. """ - param = validator._create_parameter( - category="test_category", - name="test_param", - value="actual_value", - expected_value="expected_value", - ) - assert param["status"] == TestStatus.ERROR.value + xml_str = DUMMY_XML_RESOURCES + root = ET.fromstring(xml_str) + params = validator._parse_resources_section(root) + categories = [p.get("category", "") for p in params] + expected_categories = [ + "sbd_stonith", + "topology", + "hana", + "ipaddr", + "azurelb", + "filesystem", + "fence_agent", + ] + found_categories = [] + for cat in expected_categories: + if any(cat in category for category in categories): + found_categories.append(cat) + assert len(found_categories) > 0 + + def test_parse_ha_cluster_config_with_cib(self, validator_with_cib): + """ + Test parse_ha_cluster_config method with CIB output. + """ + result = validator_with_cib.get_result() + assert result["status"] in [TestStatus.SUCCESS.value, TestStatus.ERROR.value] + assert "parameters" in result["details"] + assert "CIB output provided" in result["message"] - def test_create_parameter_with_invalid_expected_value_type(self, validator): + def test_main_with_ansible_module(self): """ - Test _create_parameter method with invalid expected value type. + Test main function with successful AnsibleModule creation. """ - param = validator._create_parameter( - category="test_category", - name="test_param", - value="test_value", - expected_value={"invalid": "type"}, - ) - assert param["status"] == TestStatus.ERROR.value + mock_result = {} - def test_create_parameter_with_none_expected_value(self, validator): + class MockAnsibleModule: + def __init__(self, argument_spec=None, **kwargs): + self.params = { + "sid": "HDB", + "instance_number": "00", + "virtual_machine_name": "vmname", + "fencing_mechanism": "sbd", + "pcmk_constants": DUMMY_CONSTANTS, + "saphanasr_provider": "SAPHanaSR", + "cib_output": "", + "os_family": "RedHat", + } + + def exit_json(self, **kwargs): + nonlocal mock_result + mock_result = kwargs + + def mock_ansible_facts(module): + return {"os_family": "RedHat"} + + import src.modules.get_pcmk_properties_db as module_under_test + + original_ansible_module = module_under_test.AnsibleModule + original_ansible_facts = module_under_test.ansible_facts + original_open = builtins.open + module_under_test.AnsibleModule = MockAnsibleModule + module_under_test.ansible_facts = mock_ansible_facts + builtins.open = MockOpen(DUMMY_GLOBAL_INI_SAPHANASR) + + try: + main() + assert "status" in mock_result + assert "message" in mock_result + finally: + module_under_test.AnsibleModule = original_ansible_module + module_under_test.ansible_facts = original_ansible_facts + builtins.open = original_open + + def test_main_with_exception_fallback(self): """ - Test _create_parameter method when expected_value is None. + Test main function with exception handling fallback. 
""" - param = validator._create_parameter( - category="crm_config", name="test_param", value="test_value", expected_value=None - ) - assert param["status"] == TestStatus.INFO.value + mock_result = {} + + class MockAnsibleModuleFallback: + def __init__(self, argument_spec=None, **kwargs): + self.params = { + "sid": "HDB", + "instance_number": "00", + "virtual_machine_name": "vmname", + "fencing_mechanism": "sbd", + "pcmk_constants": DUMMY_CONSTANTS, + "saphanasr_provider": "SAPHanaSR", + "cib_output": "", + "os_family": "RedHat", + } + + def exit_json(self, **kwargs): + nonlocal mock_result + mock_result = kwargs + + call_count = 0 + + def mock_ansible_module_factory(*args, **kwargs): + nonlocal call_count + call_count += 1 + if call_count == 1: + raise Exception("First call fails") + return MockAnsibleModuleFallback(*args, **kwargs) + + import src.modules.get_pcmk_properties_db as module_under_test + + original_ansible_module = module_under_test.AnsibleModule + original_open = builtins.open + module_under_test.AnsibleModule = mock_ansible_module_factory + builtins.open = MockOpen(DUMMY_GLOBAL_INI_SAPHANASR) + try: + main() + assert "status" in mock_result + finally: + module_under_test.AnsibleModule = original_ansible_module + builtins.open = original_open - def test_parse_global_ini_parameters_angi_provider(self, validator_angi): + def test_all_resource_types_parsed(self, validator): """ - Test _parse_global_ini_parameters method with ANGI provider. - Covers lines 420-447. + Test that all defined resource categories can be parsed. """ - result = validator_angi.get_result() - assert "details" in result - assert "parameters" in result["details"] + for category, xpath in HAClusterValidator.RESOURCE_CATEGORIES.items(): + xml_str = DUMMY_XML_RESOURCES + root = ET.fromstring(xml_str) + elements = root.findall(xpath) + if elements: + params = validator._parse_resource(elements[0], category) + assert isinstance(params, list) - def test_parse_basic_config(self, validator): + def test_global_ini_section_detection(self, validator_angi): """ - Test _parse_basic_config method. - Covers lines 462-473. + Test global.ini section detection for different providers. """ - xml_str = """ - - - """ - params = validator._parse_basic_config( - ET.fromstring(xml_str), "crm_config", "test_subcategory" - ) - assert len(params) == 2 - assert params[0]["category"] == "crm_config_test_subcategory" - assert params[0]["name"] == "test_param" - assert params[0]["value"] == "test_value" + params = validator_angi._parse_global_ini_parameters() + assert isinstance(params, list) - def test_parse_resource_hana_meta_and_topology_meta(self, validator): + def test_get_expected_value_methods(self, validator): """ - Test _parse_resource method for hana_meta and topology_meta categories. - Covers lines 486-521. + Test inherited expected value methods. 
""" - xml_str = """ - - """ - element = ET.fromstring(xml_str) - params = validator._parse_resource(element, "hana_meta") - assert len(params) > 0 - params = validator._parse_resource(element, "topology_meta") - assert len(params) > 0 + validator.fencing_mechanism = "azure-fence-agent" + expected = validator._get_expected_value("crm_config", "priority") + assert expected == "10" + expected = validator._get_expected_value("crm_config", "stonith-enabled") + assert expected == "true" + expected = validator._get_resource_expected_value( + "fence_agent", "meta_attributes", "pcmk_delay_max" + ) + assert expected == "15" - def test_parse_constraints_with_valid_constraints(self, validator_angi): + def test_parse_constraints_with_valid_constraints(self, validator): """ Test _parse_constraints method with valid constraints. - Covers lines 532-552. """ xml_str = """ - """ root = ET.fromstring(xml_str) - params = validator_angi._parse_constraints(root) - constraint_params = [p for p in params if p["category"] == "constraints_rsc_location"] - assert len(constraint_params) >= 1 + params = validator._parse_constraints(root) + assert len(params) > 0 - def test_parse_ha_cluster_config_redhat_skip_op_defaults(self, monkeypatch): + def test_successful_validation_result(self, validator): """ - Test parse_ha_cluster_config method with REDHAT OS skipping op_defaults. - Covers lines 574-607. + Test that validator returns proper result structure. """ - - def mock_execute_command(*args, **kwargs): - return "" - - monkeypatch.setattr( - "src.module_utils.sap_automation_qa.SapAutomationQA.execute_command_subprocess", - mock_execute_command, - ) - monkeypatch.setattr("builtins.open", fake_open_factory(DUMMY_GLOBAL_INI)) - validator = HAClusterValidator( - os_type=OperatingSystemFamily.REDHAT, - os_version="9.2", - sid="PRD", - instance_number="00", - fencing_mechanism="AFA", - virtual_machine_name="vmname", - constants=DUMMY_CONSTANTS, - saphanasr_provider=HanaSRProvider.SAPHANASR, - ) result = validator.get_result() + assert "status" in result + assert "message" in result assert "details" in result - - def test_parse_ha_cluster_config_success(self, validator): - """ - Test the parse_ha_cluster_config method for successful parsing. - - :param validator: HAClusterValidator instance. - :type validator: HAClusterValidator - """ - result = validator.get_result() - assert result["status"] == "PASSED" - - def test_main_method(self, monkeypatch): - """ - Test the main method of the module. - - :param monkeypatch: Monkeypatch fixture for mocking. - :type monkeypatch: - """ - mock_result = {} - - class MockAnsibleModule: - """ - Mock class for AnsibleModule. 
- """ - - def __init__(self, *args, **kwargs): - self.params = { - "sid": "PRD", - "instance_number": "00", - "virtual_machine_name": "vm_name", - "fencing_mechanism": "AFA", - "os_version": "9.2", - "pcmk_constants": DUMMY_CONSTANTS, - "saphanasr_provider": HanaSRProvider.SAPHANASR.value, - } - - def exit_json(self, **kwargs): - nonlocal mock_result - mock_result = kwargs - - monkeypatch.setattr( - "src.modules.get_pcmk_properties_db.AnsibleModule", - MockAnsibleModule, - ) - - main() - - assert mock_result["status"] == "PASSED" + assert "parameters" in result["details"] + assert isinstance(result["details"]["parameters"], list) diff --git a/tests/modules/get_pcmk_properties_scs_test.py b/tests/modules/get_pcmk_properties_scs_test.py index 82bbc2df..a2f542c9 100644 --- a/tests/modules/get_pcmk_properties_scs_test.py +++ b/tests/modules/get_pcmk_properties_scs_test.py @@ -2,10 +2,9 @@ # Licensed under the MIT License. """ -Unit tests for the get_pcmk_properties_db module. +Unit tests for the get_pcmk_properties_scs module. """ -import io import xml.etree.ElementTree as ET import pytest from src.modules.get_pcmk_properties_scs import HAClusterValidator, main @@ -28,132 +27,191 @@ DUMMY_XML_CRM = """ - + - - - """ DUMMY_XML_CONSTRAINTS = """ - - + + + """ DUMMY_XML_RESOURCES = """ + + - - - - + - - - - - - - - - - - - - - + + + + + + + + + + + + - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - - - - - + + + + + + + + + + + + """ -DUMMY_OS_COMMAND = """kernel.numa_balancing = 0""" - -DUMMY_GLOBAL_INI = """[DEFAULT] -dumm1 = dummy2 +DUMMY_XML_FULL_CIB = f""" + + + {DUMMY_XML_CRM} + {DUMMY_XML_RSC} + {DUMMY_XML_OP} + {DUMMY_XML_CONSTRAINTS} + {DUMMY_XML_RESOURCES} + +""" -[ha_dr_provider_SAPHanaSR] -provider = SAPHanaSR -""" +DUMMY_OS_COMMAND = """kernel.numa_balancing = 0""" DUMMY_CONSTANTS = { "VALID_CONFIGS": { - "REDHAT": {"stonith-enabled": "true"}, + "REDHAT": {"stonith-enabled": "true", "cluster-name": "scs_S4D"}, "azure-fence-agent": {"priority": "10"}, + "sbd": {"pcmk_delay_max": "30"}, }, "RSC_DEFAULTS": { - "REDHAT": { - "resource-stickiness": "1000", - "migration-threshold": "5000", - } + "resource-stickiness": "1000", + "migration-threshold": "5000", }, "OP_DEFAULTS": { - "REDHAT": { - "timeout": "600", - "record-pending": "true", - } + "timeout": "600", + "record-pending": "true", + }, + "CRM_CONFIG_DEFAULTS": { + "stonith-enabled": "true", + "maintenance-mode": "false", }, - "CRM_CONFIG_DEFAULTS": {"stonith-enabled": "true"}, "RESOURCE_DEFAULTS": { "REDHAT": { - "stonith": { - "meta_attributes": {"priority": "10"}, - "operations": {"monitor": {"timeout": "30"}}, - }, - "hana": {"meta_attributes": {"clone-max": "2"}}, "fence_agent": { - "meta_attributes": {"pcmk_delay_max": "15"}, - "operations": {"monitor": {"timeout": ["700", "700s"]}}, - "instance_attributes": {"resourceGroup": "test-rg"}, + "meta_attributes": {"pcmk_delay_max": "15", "target-role": "Started"}, + "operations": { + "monitor": {"timeout": ["700", "700s"], "interval": "10"}, + "start": {"timeout": "20"}, + }, + "instance_attributes": {"login": "testuser", "resourceGroup": "test-rg"}, + }, + "sbd_stonith": { + "meta_attributes": {"pcmk_delay_max": "30", "target-role": "Started"}, + "operations": { + "monitor": {"timeout": ["30", "30s"], "interval": "10"}, + "start": {"timeout": "20"}, + }, + }, + "ascs": { + "meta_attributes": {"target-role": "Started"}, + "operations": {"monitor": {"timeout": ["600", "600s"]}}, + "instance_attributes": {"InstanceName": "S4D_ASCS00_sapascs"}, + }, + "ers": { + 
"meta_attributes": {"target-role": "Started"}, + "operations": {"monitor": {"timeout": ["600", "600s"]}}, + "instance_attributes": {"InstanceName": "S4D_ERS10_sapers"}, + }, + "ipaddr": { + "instance_attributes": {"ip": {"AFS": ["10.0.1.100"], "ANF": ["10.0.1.101"]}} }, } }, "OS_PARAMETERS": { "DEFAULTS": {"sysctl": {"kernel.numa_balancing": "kernel.numa_balancing = 0"}} }, - "GLOBAL_INI": {"REDHAT": {"provider": "SAPHanaSR"}}, - "CONSTRAINTS": {"rsc_location": {"score": "INFINITY"}}, + "CONSTRAINTS": { + "rsc_location": {"score": "INFINITY"}, + "rsc_colocation": {"score": "4000"}, + "rsc_order": {"kind": "Optional"}, + }, } -def fake_open_factory(file_content): +class MockExecuteCommand: """ - Factory function to create a fake open function that returns a StringIO object. - - :param file_content: Content to be returned by the fake open function. - :type file_content: str - :return: Fake open function. - :rtype: function + Mock class for execute_command_subprocess. """ - def fake_open(*args, **kwargs): - """ - Fake open function that returns a StringIO object. + def __init__(self, mock_outputs): + self.mock_outputs = mock_outputs - :param *args: Positional arguments. - :param **kwargs: Keyword arguments. - :return: _description_ - :rtype: _type_ - """ - return io.StringIO("\n".join(file_content)) + def __call__(self, command, shell_command=False): + command_str = " ".join(command) if isinstance(command, list) else str(command) + if "sysctl" in command_str: + return DUMMY_OS_COMMAND + if len(command) >= 2 and command[-1] in self.mock_outputs: + return self.mock_outputs[command[-1]] + return "" - return fake_open + +class TestableHAClusterValidator(HAClusterValidator): + """ + Testable version of HAClusterValidator with mocked dependencies. + """ + + def __init__(self, mock_execute_command, *args, **kwargs): + self._mock_execute_command = mock_execute_command + super().__init__(*args, **kwargs) + + def execute_command_subprocess(self, command, shell_command=False): + return self._mock_execute_command(command, shell_command) class TestHAClusterValidator: @@ -165,9 +223,6 @@ class TestHAClusterValidator: def mock_xml_outputs(self): """ Fixture for providing mock XML outputs. - - :return: Mock XML outputs. - :rtype: dict """ return { "rsc_defaults": DUMMY_XML_RSC, @@ -178,277 +233,305 @@ def mock_xml_outputs(self): } @pytest.fixture - def validator(self, monkeypatch, mock_xml_outputs): - """ - Fixture for creating a HAClusterValidator instance. - - :param monkeypatch: Monkeypatch fixture for mocking. - :type monkeypatch: pytest.MonkeyPatch - :param mock_xml_outputs: Mock XML outputs. - :type mock_xml_outputs: dict - :return: HAClusterValidator instance. - :rtype: HAClusterValidator - """ - - def mock_execute_command(*args, **kwargs): - """ - Mock function to replace execute_command_subprocess. - - :param *args: Positional arguments. - :param **kwargs: Keyword arguments. - :return: Mocked command output. - :rtype: str - """ - command = str(args[1]) if len(args) > 1 else str(kwargs.get("command")) - if "sysctl" in command: - return DUMMY_OS_COMMAND - return mock_xml_outputs.get(command[-1], "") - - monkeypatch.setattr( - "src.module_utils.sap_automation_qa.SapAutomationQA.execute_command_subprocess", - mock_execute_command, - ) - monkeypatch.setattr("builtins.open", fake_open_factory(DUMMY_GLOBAL_INI)) - return HAClusterValidator( + def validator(self, mock_xml_outputs): + """ + Fixture for creating a TestableHAClusterValidator instance. 
+ """ + mock_execute = MockExecuteCommand(mock_xml_outputs) + return TestableHAClusterValidator( + mock_execute, os_type=OperatingSystemFamily.REDHAT, - sid="PRD", + sid="S4D", scs_instance_number="00", - ers_instance_number="01", - fencing_mechanism="AFA", + ers_instance_number="10", + fencing_mechanism="sbd", virtual_machine_name="vmname", constants=DUMMY_CONSTANTS, + cib_output="", + nfs_provider="AFS", ) - def test_parse_ha_cluster_config_success(self, validator): + @pytest.fixture + def validator_anf(self, mock_xml_outputs): """ - Test the parse_ha_cluster_config method for successful parsing. - - :param validator: HAClusterValidator instance. - :type validator: HAClusterValidator + Fixture for creating a validator with ANF provider. """ - result = validator.get_result() - assert result["status"] == "PASSED" + mock_execute = MockExecuteCommand(mock_xml_outputs) + return TestableHAClusterValidator( + mock_execute, + os_type=OperatingSystemFamily.REDHAT, + sid="S4D", + scs_instance_number="00", + ers_instance_number="10", + fencing_mechanism="sbd", + virtual_machine_name="vmname", + constants=DUMMY_CONSTANTS, + cib_output="", + nfs_provider="ANF", + ) - def test_main_method(self, monkeypatch): + @pytest.fixture + def validator_with_cib(self): """ - Test the main method of the module. - - :param monkeypatch: Monkeypatch fixture for mocking. - :type monkeypatch: + Fixture for creating a validator with CIB output. """ - mock_result = {} - - class MockAnsibleModule: - """ - Mock class to simulate AnsibleModule behavior. - """ - - def __init__(self, *args, **kwargs): - self.params = { - "sid": "PRD", - "ascs_instance_number": "00", - "ers_instance_number": "01", - "virtual_machine_name": "vm_name", - "fencing_mechanism": "AFA", - "pcmk_constants": DUMMY_CONSTANTS, - } - - def exit_json(self, **kwargs): - nonlocal mock_result - mock_result = kwargs - - def mock_ansible_facts(module): - """ - Mock function to return Ansible facts. - - :param module: Ansible module instance. - :type module: AnsibleModule - :return: Mocked Ansible facts. - :rtype: dict - """ - return {"os_family": "REDHAT"} - - monkeypatch.setattr( - "src.modules.get_pcmk_properties_scs.AnsibleModule", - MockAnsibleModule, - ) - monkeypatch.setattr( - "src.modules.get_pcmk_properties_scs.ansible_facts", - mock_ansible_facts, + return HAClusterValidator( + os_type=OperatingSystemFamily.REDHAT, + sid="S4D", + scs_instance_number="00", + ers_instance_number="10", + fencing_mechanism="sbd", + virtual_machine_name="vmname", + constants=DUMMY_CONSTANTS, + cib_output=DUMMY_XML_FULL_CIB, ) - main() - - assert mock_result["status"] == "PASSED" - - def test_get_expected_value_fence_config(self, validator): + def test_init(self, validator): """ - Test _get_expected_value method with fence configuration. + Test the __init__ method. """ - validator.fencing_mechanism = "azure-fence-agent" - expected = validator._get_expected_value("crm_config", "priority") - assert expected == "10" + assert validator.os_type == "REDHAT" + assert validator.sid == "S4D" + assert validator.scs_instance_number == "00" + assert validator.ers_instance_number == "10" + assert validator.nfs_provider == "AFS" - def test_get_resource_expected_value_meta_attributes(self, validator): + def test_get_expected_value_for_category_resource(self, validator): """ - Test _get_resource_expected_value method for meta_attributes section. + Test _get_expected_value_for_category method for resource category. 
""" - expected = validator._get_resource_expected_value( - "fence_agent", "meta_attributes", "pcmk_delay_max" + expected = validator._get_expected_value_for_category( + "fence_agent", "meta_attributes", "pcmk_delay_max", None ) assert expected == "15" - def test_create_parameter_with_none_expected_value_resource_category(self, validator): + def test_get_expected_value_for_category_ascs_ers(self, validator): """ - Test _create_parameter method when expected_value is None and category is - in RESOURCE_CATEGORIES. + Test _get_expected_value_for_category method for ASCS/ERS categories. """ - param = validator._create_parameter( - category="ipaddr", name="test_param", value="test_value", subcategory="meta_attributes" + expected = validator._get_expected_value_for_category( + "ascs", "meta_attributes", "target-role", None ) - assert param["category"] == "ipaddr_meta_attributes" + assert expected == "Started" + expected = validator._get_expected_value_for_category( + "ers", "meta_attributes", "target-role", None + ) + assert expected == "Started" - def test_create_parameter_with_none_expected_value_or_empty_value(self, validator): + def test_get_expected_value_for_category_basic(self, validator): """ - Test _create_parameter method when expected_value is None or value is empty. - + Test _get_expected_value_for_category method for basic category. """ - param = validator._create_parameter( - category="crm_config", name="test_param", value="test_value", expected_value=None + expected = validator._get_expected_value_for_category( + "crm_config", None, "stonith-enabled", None ) - assert param["status"] == TestStatus.INFO.value + assert expected == "true" - param = validator._create_parameter( - category="crm_config", name="test_param", value="", expected_value="expected" + def test_determine_parameter_status_with_dict_expected_value_anf(self, validator_anf): + """ + Test _determine_parameter_status method with dict expected value and ANF provider. + """ + status = validator_anf._determine_parameter_status( + "10.0.1.101", {"AFS": ["10.0.1.100"], "ANF": ["10.0.1.101"]} ) - assert param["status"] == TestStatus.INFO.value + assert status == TestStatus.SUCCESS.value - def test_parse_resource_with_meta_and_instance_attributes(self, validator): + def test_determine_parameter_status_info_cases(self, validator): """ - Test _parse_resource method with meta_attributes and instance_attributes. + Test _determine_parameter_status method for INFO status cases. 
""" - xml_str = """ - - - - - - - """ - element = ET.fromstring(xml_str) - - params = validator._parse_resource(element, "sbd_stonith") - - meta_params = [p for p in params if p["category"] == "sbd_stonith_meta_attributes"] - instance_params = [p for p in params if p["category"] == "sbd_stonith_instance_attributes"] - - assert len(meta_params) >= 1 - assert len(instance_params) >= 1 + status = validator._determine_parameter_status( + "10.0.1.102", {"AFS": ["10.0.1.100"], "ANF": ["10.0.1.101"]} + ) + assert status == TestStatus.ERROR.value + validator.nfs_provider = "UNKNOWN" + status = validator._determine_parameter_status( + "10.0.1.100", {"AFS": ["10.0.1.100"], "ANF": ["10.0.1.101"]} + ) + assert status == TestStatus.SUCCESS.value + status = validator._determine_parameter_status("500", ["600", "600s"]) + assert status == TestStatus.ERROR.value + status = validator._determine_parameter_status("value", None) + assert status == TestStatus.INFO.value + status = validator._determine_parameter_status("", "expected") + assert status == TestStatus.INFO.value + status = validator._determine_parameter_status("value", 123) + assert status == TestStatus.ERROR.value - def test_parse_basic_config(self, validator): + def test_parse_resources_section_with_ascs_ers_groups(self, validator): """ - Test _parse_basic_config method. + Test _parse_resources_section method with ASCS/ERS groups. """ - xml_str = """ - - - """ - element = ET.fromstring(xml_str) + xml_str = DUMMY_XML_RESOURCES + root = ET.fromstring(xml_str) + params = validator._parse_resources_section(root) + assert len(params) > 0 + categories = [p.get("category", "") for p in params] + ascs_found = any("ascs" in cat for cat in categories) + ers_found = any("ers" in cat for cat in categories) + assert ascs_found + assert ers_found - params = validator._parse_basic_config(element, "crm_config", "test_subcategory") + def test_parse_resources_section_all_resource_types(self, validator): + """ + Test _parse_resources_section method covers all resource types. + """ + xml_str = DUMMY_XML_RESOURCES + root = ET.fromstring(xml_str) + params = validator._parse_resources_section(root) + categories = [p.get("category", "") for p in params] + expected_categories = ["sbd_stonith", "fence_agent", "ipaddr", "azurelb", "azureevents"] + found_categories = [] + for cat in expected_categories: + if any(cat in category for category in categories): + found_categories.append(cat) - assert len(params) == 2 - assert params[0]["category"] == "crm_config_test_subcategory" - assert params[0]["name"] == "test_param" - assert params[0]["value"] == "test_value" + assert len(found_categories) > 0 - def test_parse_constraints_with_missing_attributes(self, validator): + def test_parse_ha_cluster_config_with_cib(self, validator_with_cib): """ - Test _parse_constraints method with missing attributes. + Test parse_ha_cluster_config method with CIB output. """ - xml_str = """ - - """ - root = ET.fromstring(xml_str) - params = validator._parse_constraints(root) - assert isinstance(params, list) + result = validator_with_cib.get_result() + assert result["status"] in [TestStatus.SUCCESS.value, TestStatus.ERROR.value] + assert "parameters" in result["details"] + assert "CIB output provided" in result["message"] - def test_parse_ha_cluster_config_with_empty_root(self, monkeypatch): + def test_main_with_ansible_module(self): """ - Test parse_ha_cluster_config method when root is empty. - Covers lines 508-546. + Test main function with successful AnsibleModule creation. 
""" + mock_result = {} - def mock_execute_command(*args, **kwargs): - return "" + class MockAnsibleModule: + def __init__(self, argument_spec=None, **kwargs): + self.params = { + "sid": "S4D", + "ascs_instance_number": "00", + "ers_instance_number": "10", + "virtual_machine_name": "vmname", + "pcmk_constants": DUMMY_CONSTANTS, + "fencing_mechanism": "sbd", + "nfs_provider": "AFS", + "cib_output": "", + "filter": "os_family", + } - monkeypatch.setattr( - "src.module_utils.sap_automation_qa.SapAutomationQA.execute_command_subprocess", - mock_execute_command, - ) + def exit_json(self, **kwargs): + nonlocal mock_result + mock_result = kwargs + def mock_ansible_facts(module): + return {"os_family": "SUSE"} + + import src.modules.get_pcmk_properties_scs as module_under_test + + original_ansible_module = module_under_test.AnsibleModule + original_ansible_facts = module_under_test.ansible_facts + module_under_test.AnsibleModule = MockAnsibleModule + module_under_test.ansible_facts = mock_ansible_facts + try: + main() + assert "status" in mock_result + assert "message" in mock_result + finally: + module_under_test.AnsibleModule = original_ansible_module + module_under_test.ansible_facts = original_ansible_facts + + def test_validator_initialization_calls_parse(self): + """ + Test that validator initialization calls parse_ha_cluster_config. + """ validator = HAClusterValidator( - os_type=OperatingSystemFamily.SUSE, - sid="PRD", + os_type=OperatingSystemFamily.REDHAT, + sid="S4D", scs_instance_number="00", - ers_instance_number="01", - fencing_mechanism="AFA", + ers_instance_number="10", + fencing_mechanism="sbd", virtual_machine_name="vmname", constants=DUMMY_CONSTANTS, + cib_output=DUMMY_XML_FULL_CIB, ) - result = validator.get_result() + assert "status" in result assert "details" in result - def test_get_resource_expected_value_operations_section(self, validator): + def test_resource_categories_defined(self, validator): """ - Test _get_resource_expected_value method for operations section. + Test that RESOURCE_CATEGORIES are properly defined. """ - expected = validator._get_resource_expected_value( - "fence_agent", "operations", "timeout", "monitor" - ) - assert expected == ["700", "700s"] + expected_categories = ["sbd_stonith", "fence_agent", "ipaddr", "azurelb", "azureevents"] + for category in expected_categories: + assert category in HAClusterValidator.RESOURCE_CATEGORIES + assert HAClusterValidator.RESOURCE_CATEGORIES[category].startswith(".//") - def test_get_resource_expected_value_return_none(self, validator): + def test_parse_constraints_with_location_constraints(self, validator): """ - Test _get_resource_expected_value method returns None for unknown section. + Test _parse_constraints method with location constraints. """ - expected = validator._get_resource_expected_value("fence_agent", "unknown_section", "param") - assert expected is None + xml_str = """ + + + + + """ + root = ET.fromstring(xml_str) + params = validator._parse_constraints(root) + location_params = [p for p in params if "rsc_location" in p["category"]] + colocation_params = [p for p in params if "rsc_colocation" in p["category"]] + order_params = [p for p in params if "rsc_order" in p["category"]] + assert len(location_params) >= 1 + assert len(colocation_params) >= 1 + assert len(order_params) >= 1 - def test_create_parameter_with_list_expected_value_success(self, validator): + def test_successful_validation_result(self, validator): """ - Test _create_parameter method with list expected value - success case. 
+ Test that validator returns proper result structure. """ - param = validator._create_parameter( - category="test_category", - name="test_param", - value="value1", - expected_value=["value1", "value2"], - ) - assert param["status"] == TestStatus.SUCCESS.value - assert param["expected_value"] == "value1" + result = validator.get_result() + assert "status" in result + assert "message" in result + assert "details" in result + assert "parameters" in result["details"] + assert isinstance(result["details"]["parameters"], list) - def test_create_parameter_with_list_expected_value_error(self, validator): + def test_parse_resource_with_operations(self, validator): """ - Test _create_parameter method with list expected value - error case. + Test _parse_resource method with operations section. """ - param = validator._create_parameter( - category="test_category", - name="test_param", - value="value3", - expected_value=["value1", "value2"], - ) - assert param["status"] == TestStatus.ERROR.value + xml_str = """ + + + + + """ + element = ET.fromstring(xml_str) + params = validator._parse_resource(element, "ascs") + timeout_params = [p for p in params if p["name"].endswith("_timeout")] + interval_params = [p for p in params if p["name"].endswith("_interval")] + assert len(timeout_params) == 2 + assert len(interval_params) == 2 - def test_create_parameter_with_invalid_expected_value_type(self, validator): + def test_get_expected_value_methods_coverage(self, validator): """ - Test _create_parameter method with invalid expected value type. + Test inherited expected value methods for coverage. """ - param = validator._create_parameter( - category="test_category", - name="test_param", - value="test_value", - expected_value=123, + validator.fencing_mechanism = "azure-fence-agent" + expected = validator._get_expected_value("crm_config", "priority") + assert expected == "10" + expected = validator._get_expected_value("crm_config", "stonith-enabled") + assert expected == "true" + expected = validator._get_resource_expected_value( + "fence_agent", "meta_attributes", "pcmk_delay_max" ) - assert param["status"] == TestStatus.ERROR.value + assert expected == "15" + expected = validator._get_resource_expected_value( + "fence_agent", "operations", "timeout", "monitor" + ) + assert expected == ["700", "700s"] + expected = validator._get_resource_expected_value( + "fence_agent", "instance_attributes", "login" + ) + assert expected == "testuser" + expected = validator._get_resource_expected_value("fence_agent", "unknown_section", "param") + assert expected is None From 3b7ccec18165d4c3efeddb3179c83c86db78102e Mon Sep 17 00:00:00 2001 From: Devansh Jain <86314060+devanshjainms@users.noreply.github.com> Date: Thu, 10 Jul 2025 08:05:43 -0700 Subject: [PATCH 4/6] Add hana_resource_name parameter to HANA tasks to get hana resource name dynamically (#95) --- src/modules/get_cluster_status_db.py | 12 +++- src/roles/ha_db_hana/tasks/block-network.yml | 4 ++ src/roles/ha_db_hana/tasks/fs-freeze.yml | 2 + .../ha_db_hana/tasks/primary-crash-index.yml | 5 +- src/roles/ha_db_hana/tasks/primary-echo-b.yml | 3 + .../ha_db_hana/tasks/primary-node-crash.yml | 2 + .../ha_db_hana/tasks/primary-node-kill.yml | 3 + .../ha_db_hana/tasks/resource-migration.yml | 2 + src/roles/ha_db_hana/tasks/sbd-fencing.yml | 2 + .../tasks/secondary-crash-index.yml | 2 + .../ha_db_hana/tasks/secondary-echo-b.yml | 2 + .../ha_db_hana/tasks/secondary-node-kill.yml | 2 + src/roles/misc/tasks/cluster-report.yml | 1 + src/roles/misc/tasks/pre-validations-db.yml | 
65 ++++++++++++++++++- tests/modules/get_cluster_status_db_test.py | 2 + tests/roles/ha_db_hana/block_network_test.py | 13 ++++ .../roles/ha_db_hana/primary_node_ops_test.py | 13 ++++ .../ha_db_hana/secondary_node_ops_test.py | 14 +++- .../roles/mock_data/get_cluster_status_db.txt | 1 + .../secondary_get_cluster_status_db.txt | 3 +- 20 files changed, 147 insertions(+), 6 deletions(-) diff --git a/src/modules/get_cluster_status_db.py b/src/modules/get_cluster_status_db.py index 2928890c..bc2da107 100644 --- a/src/modules/get_cluster_status_db.py +++ b/src/modules/get_cluster_status_db.py @@ -146,11 +146,13 @@ def __init__( db_instance_number: str, saphanasr_provider: HanaSRProvider, ansible_os_family: OperatingSystemFamily, + hana_resource_name: str = "", ): super().__init__(ansible_os_family) self.database_sid = database_sid self.saphanasr_provider = saphanasr_provider self.db_instance_number = db_instance_number + self.hana_resource_name = hana_resource_name self.result.update( { "primary_node": "", @@ -206,8 +208,12 @@ def _process_node_attributes(self, cluster_status_xml: ET.Element) -> Dict[str, }, HanaSRProvider.ANGI: { "clone_attr": f"hana_{self.database_sid}_clone_state", - "sync_attr": f"master-rsc_SAPHanaCon_{self.database_sid.upper()}" - + f"_HDB{self.db_instance_number}", + "sync_attr": ( + f"master-{self.hana_resource_name}" + if self.hana_resource_name + else f"master-rsc_SAPHanaCon_{self.database_sid.upper()}" + + f"_HDB{self.db_instance_number}" + ), "primary": {"clone": "PROMOTED", "sync": "150"}, "secondary": {"clone": "DEMOTED", "sync": "100"}, }, @@ -288,6 +294,7 @@ def run_module() -> None: database_sid=dict(type="str", required=True), saphanasr_provider=dict(type="str", required=True), db_instance_number=dict(type="str", required=True), + hana_resource_name=dict(type="str", required=False), filter=dict(type="str", required=False, default="os_family"), ) @@ -300,6 +307,7 @@ def run_module() -> None: str(ansible_facts(module).get("os_family", "UNKNOWN")).upper() ), db_instance_number=module.params["db_instance_number"], + hana_resource_name=module.params.get("hana_resource_name", ""), ) checker.run() diff --git a/src/roles/ha_db_hana/tasks/block-network.yml b/src/roles/ha_db_hana/tasks/block-network.yml index 98b39c02..6f9688b5 100644 --- a/src/roles/ha_db_hana/tasks/block-network.yml +++ b/src/roles/ha_db_hana/tasks/block-network.yml @@ -93,6 +93,7 @@ operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" + hana_resource_name: "{{ hana_resource_name | default('') }}" register: cluster_status_test_execution_primary retries: "{{ default_retries }}" delay: "{{ default_delay }}" @@ -119,6 +120,7 @@ operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" + hana_resource_name: "{{ hana_resource_name | default('') }}" register: cluster_status_post_primary retries: "{{ default_retries }}" delay: "{{ default_delay }}" @@ -138,6 +140,7 @@ operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" + hana_resource_name: "{{ hana_resource_name | default('') }}" register: cluster_status_test_execution_secondary retries: "{{ default_retries }}" delay: "{{ default_delay }}" @@ -155,6 +158,7 @@ operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" + 
hana_resource_name: "{{ hana_resource_name | default('') }}" register: cluster_status_post_secondary retries: "{{ default_retries }}" delay: "{{ default_delay }}" diff --git a/src/roles/ha_db_hana/tasks/fs-freeze.yml b/src/roles/ha_db_hana/tasks/fs-freeze.yml index 5efa5cd0..80eae09e 100644 --- a/src/roles/ha_db_hana/tasks/fs-freeze.yml +++ b/src/roles/ha_db_hana/tasks/fs-freeze.yml @@ -59,6 +59,7 @@ operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" + hana_resource_name: "{{ hana_resource_name | default('') }}" register: cluster_status_test_execution retries: "{{ default_retries }}" delay: "{{ default_delay }}" @@ -75,6 +76,7 @@ operation_step: "post_failover" database_sid: "{{ db_sid | lower }}" saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" + hana_resource_name: "{{ hana_resource_name | default('') }}" register: cluster_status_post retries: "{{ default_retries }}" delay: "{{ default_delay }}" diff --git a/src/roles/ha_db_hana/tasks/primary-crash-index.yml b/src/roles/ha_db_hana/tasks/primary-crash-index.yml index a3543ae5..ac98e9d5 100644 --- a/src/roles/ha_db_hana/tasks/primary-crash-index.yml +++ b/src/roles/ha_db_hana/tasks/primary-crash-index.yml @@ -57,6 +57,7 @@ operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" + hana_resource_name: "{{ hana_resource_name | default('') }}" register: cluster_status_test_execution retries: "{{ default_retries }}" delay: "{{ default_delay }}" @@ -71,6 +72,7 @@ operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" + hana_resource_name: "{{ hana_resource_name | default('') }}" register: cluster_status_test_execution retries: "{{ default_retries }}" delay: "{{ default_delay }}" @@ -116,10 +118,11 @@ - name: "Test Execution: Validate HANA DB cluster status 2" get_cluster_status_db: - db_instance_number: "{{ db_instance_number }}" + db_instance_number: "{{ db_instance_number }}" operation_step: "post_failover" database_sid: "{{ db_sid | lower }}" saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" + hana_resource_name: "{{ hana_resource_name | default('') }}" register: cluster_status_post retries: "{{ default_retries }}" delay: "{{ default_delay }}" diff --git a/src/roles/ha_db_hana/tasks/primary-echo-b.yml b/src/roles/ha_db_hana/tasks/primary-echo-b.yml index 173acecb..3a0634b0 100644 --- a/src/roles/ha_db_hana/tasks/primary-echo-b.yml +++ b/src/roles/ha_db_hana/tasks/primary-echo-b.yml @@ -49,6 +49,7 @@ operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" + hana_resource_name: "{{ hana_resource_name | default('') }}" retries: "{{ default_retries }}" delay: "{{ default_delay }}" register: cluster_status_test_execution @@ -63,6 +64,7 @@ operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" + hana_resource_name: "{{ hana_resource_name | default('') }}" retries: "{{ default_retries }}" delay: "{{ default_delay }}" register: cluster_status_test_execution @@ -108,6 +110,7 @@ operation_step: "post_failover" database_sid: "{{ db_sid | lower }}" saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" + hana_resource_name: "{{ hana_resource_name | default('') }}" register: 
cluster_status_post retries: "{{ default_retries }}" delay: "{{ default_delay }}" diff --git a/src/roles/ha_db_hana/tasks/primary-node-crash.yml b/src/roles/ha_db_hana/tasks/primary-node-crash.yml index 00089109..1a236183 100644 --- a/src/roles/ha_db_hana/tasks/primary-node-crash.yml +++ b/src/roles/ha_db_hana/tasks/primary-node-crash.yml @@ -45,6 +45,7 @@ operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" + hana_resource_name: "{{ hana_resource_name | default('') }}" register: cluster_status_test_execution retries: "{{ default_retries }}" delay: "{{ default_delay }}" @@ -88,6 +89,7 @@ operation_step: "post_failover" database_sid: "{{ db_sid | lower }}" saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" + hana_resource_name: "{{ hana_resource_name | default('') }}" register: cluster_status_post retries: "{{ default_retries }}" delay: "{{ default_delay }}" diff --git a/src/roles/ha_db_hana/tasks/primary-node-kill.yml b/src/roles/ha_db_hana/tasks/primary-node-kill.yml index 5eca4111..368dc1d1 100644 --- a/src/roles/ha_db_hana/tasks/primary-node-kill.yml +++ b/src/roles/ha_db_hana/tasks/primary-node-kill.yml @@ -46,6 +46,7 @@ operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" + hana_resource_name: "{{ hana_resource_name | default('') }}" register: cluster_status_test_execution retries: "{{ default_retries }}" delay: "{{ default_delay }}" @@ -62,6 +63,7 @@ operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" + hana_resource_name: "{{ hana_resource_name | default('') }}" register: cluster_status_test_execution retries: "{{ default_retries }}" delay: "{{ default_delay }}" @@ -106,6 +108,7 @@ operation_step: "post_failover" database_sid: "{{ db_sid | lower }}" saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" + hana_resource_name: "{{ hana_resource_name | default('') }}" register: cluster_status_post retries: "{{ default_retries }}" delay: "{{ default_delay }}" diff --git a/src/roles/ha_db_hana/tasks/resource-migration.yml b/src/roles/ha_db_hana/tasks/resource-migration.yml index d76f1248..0a1c380f 100644 --- a/src/roles/ha_db_hana/tasks/resource-migration.yml +++ b/src/roles/ha_db_hana/tasks/resource-migration.yml @@ -104,6 +104,7 @@ operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" + hana_resource_name: "{{ hana_resource_name | default('') }}" register: cluster_status_test_execution retries: "{{ default_retries }}" delay: "{{ default_delay }}" @@ -159,6 +160,7 @@ operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" + hana_resource_name: "{{ hana_resource_name | default('') }}" register: cluster_status_test_execution_1 retries: "{{ default_retries }}" delay: "{{ default_delay }}" diff --git a/src/roles/ha_db_hana/tasks/sbd-fencing.yml b/src/roles/ha_db_hana/tasks/sbd-fencing.yml index aa794ec4..a08e58c9 100644 --- a/src/roles/ha_db_hana/tasks/sbd-fencing.yml +++ b/src/roles/ha_db_hana/tasks/sbd-fencing.yml @@ -60,6 +60,7 @@ operation_step: "test_execution" database_sid: "{{ db_sid | lower }}" saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}" + hana_resource_name: "{{ hana_resource_name | default('') }}" 
diff --git a/src/roles/ha_db_hana/tasks/secondary-crash-index.yml b/src/roles/ha_db_hana/tasks/secondary-crash-index.yml
index 986c7d96..59515bea 100644
--- a/src/roles/ha_db_hana/tasks/secondary-crash-index.yml
+++ b/src/roles/ha_db_hana/tasks/secondary-crash-index.yml
@@ -57,6 +57,7 @@
       operation_step: "test_execution"
       database_sid: "{{ db_sid | lower }}"
       saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}"
+      hana_resource_name: "{{ hana_resource_name | default('') }}"
     register: cluster_status_test_execution
     retries: "{{ default_retries }}"
     delay: "{{ default_delay }}"
@@ -70,6 +71,7 @@
       operation_step: "post_failover"
       database_sid: "{{ db_sid | lower }}"
       saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}"
+      hana_resource_name: "{{ hana_resource_name | default('') }}"
     register: cluster_status_post
     retries: "{{ default_retries }}"
     delay: "{{ default_delay }}"
diff --git a/src/roles/ha_db_hana/tasks/secondary-echo-b.yml b/src/roles/ha_db_hana/tasks/secondary-echo-b.yml
index 1836717d..c5ef6da1 100644
--- a/src/roles/ha_db_hana/tasks/secondary-echo-b.yml
+++ b/src/roles/ha_db_hana/tasks/secondary-echo-b.yml
@@ -53,6 +53,7 @@
       operation_step: "test_execution"
       database_sid: "{{ db_sid | lower }}"
       saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}"
+      hana_resource_name: "{{ hana_resource_name | default('') }}"
     retries: "{{ default_retries }}"
     delay: "{{ default_delay }}"
     register: cluster_status_test_execution
@@ -66,6 +67,7 @@
       operation_step: "post_failover"
       database_sid: "{{ db_sid | lower }}"
       saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}"
+      hana_resource_name: "{{ hana_resource_name | default('') }}"
     register: cluster_status_post
     retries: "{{ default_retries }}"
     delay: "{{ default_delay }}"
diff --git a/src/roles/ha_db_hana/tasks/secondary-node-kill.yml b/src/roles/ha_db_hana/tasks/secondary-node-kill.yml
index 19d65184..3c3e510c 100644
--- a/src/roles/ha_db_hana/tasks/secondary-node-kill.yml
+++ b/src/roles/ha_db_hana/tasks/secondary-node-kill.yml
@@ -51,6 +51,7 @@
       operation_step: "test_execution"
       database_sid: "{{ db_sid | lower }}"
       saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}"
+      hana_resource_name: "{{ hana_resource_name | default('') }}"
     register: cluster_status_test_execution
     retries: "{{ default_retries }}"
     delay: "{{ default_delay }}"
@@ -64,6 +65,7 @@
       operation_step: "test_execution"
       database_sid: "{{ db_sid | lower }}"
       saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}"
+      hana_resource_name: "{{ hana_resource_name | default('') }}"
     register: cluster_status_post
     retries: "{{ default_retries }}"
     delay: "{{ default_delay }}"
diff --git a/src/roles/misc/tasks/cluster-report.yml b/src/roles/misc/tasks/cluster-report.yml
index f37a8440..4467980c 100644
--- a/src/roles/misc/tasks/cluster-report.yml
+++ b/src/roles/misc/tasks/cluster-report.yml
@@ -12,6 +12,7 @@
       operation_step: "cluster_report_collection"
       database_sid: "{{ db_sid | lower | default('') }}"
       saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}"
+      hana_resource_name: "{{ hana_resource_name | default('') }}"
     register: cluster_status
     failed_when: cluster_status.primary_node == ""
diff --git a/src/roles/misc/tasks/pre-validations-db.yml b/src/roles/misc/tasks/pre-validations-db.yml
index 614a58ef..da6f1435 100644
--- a/src/roles/misc/tasks/pre-validations-db.yml
+++ b/src/roles/misc/tasks/pre-validations-db.yml
@@ -18,6 +18,69 @@
     when: (ansible_os_family | upper) == "SUSE"
     ansible.builtin.include_tasks: "roles/misc/tasks/get-saphanasr-provider.yml"

+  - name: "Pre validation: Get HANA resource id for saphanasr_angi"
+    when: saphanasr_provider | default('SAPHanaSR') == "SAPHanaSR-angi"
+    block:
+      - name: "Pre validation: Get HANA resource id for saphanasr_angi"
+        become: true
+        ansible.builtin.shell: >-
+          set -o pipefail && {{ commands
+          | selectattr('name','equalto','get_hana_resource_id_saphanasr_angi')
+          | map(attribute=(ansible_os_family|upper))
+          | first
+          }}
+        args:
+          executable: /bin/bash
+        changed_when: false
+        register: hana_resource_id
+        failed_when: hana_resource_id.rc != 0
+
+      - name: "Pre validation: Set fact the hana_resource_name"
+        ansible.builtin.set_fact:
+          hana_resource_name: "{{ hana_resource_id.stdout }}"
+
+  - name: "Pre validation: Get HANA resource id"
+    when: saphanasr_provider | default('SAPHanaSR') == "SAPHanaSR"
+    block:
+      - name: "Try master resource ID"
+        become: true
+        ansible.builtin.shell: >-
+          set -o pipefail && {{ commands
+          | selectattr('name','equalto','get_hana_resource_id')
+          | map(attribute=(ansible_os_family|upper))
+          | first
+          }}
+        args:
+          executable: /bin/bash
+        changed_when: false
+        register: hana_resource_id
+        failed_when: hana_resource_id.rc != 0
+    rescue:
+      - name: "Try clone resource ID"
+        become: true
+        ansible.builtin.shell: >-
+          set -o pipefail && {{ commands
+          | selectattr('name','equalto','get_hana_resource_id')
+          | map(attribute='REDHAT')
+          | first
+          }}
+        args:
+          executable: /bin/bash
+        changed_when: false
+        register: hana_resource_id
+        failed_when: hana_resource_id.rc != 0
+        ignore_errors: true
+    always:
+      - name: "Test Execution: Set the resource name"
+        when:
+          - hana_resource_id.rc == 0
+          - hana_resource_id.stdout is defined
+          - hana_resource_id.stdout | type_debug != 'NoneType'
+          - hana_resource_id.stdout | trim | length > 1
+        ansible.builtin.set_fact:
+          hana_resource_name: "{{ hana_resource_id.stdout }}"
+
+
   - name: "Pre Validation: Validate HANA DB cluster status on primary node"
     become: true
     get_cluster_status_db:
@@ -25,10 +88,10 @@
       operation_step: "pre_failover"
       database_sid: "{{ db_sid | lower }}"
       saphanasr_provider: "{{ saphanasr_provider | default('SAPHanaSR') }}"
+      hana_resource_name: "{{ hana_resource_name | default('') }}"
     register: cluster_status_pre
     until: cluster_status_pre.primary_node != "" or cluster_status_pre.secondary_node != ""
-    timeout: 5
     retries: 3

   - name: "Pre Validation: CleanUp any failed resource"
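Note: the `selectattr | map | first` chain in the tasks above picks an OS-specific command string out of the `commands` list of dicts. A standalone sketch of the same lookup, assuming the `jinja2` package and hypothetical sample data shaped like the framework's `commands` variable:

```python
"""Sketch of the selectattr/map/first command lookup used in the tasks above.
The `commands` list here is hypothetical sample data."""
from jinja2 import Environment

commands = [
    {"name": "get_hana_resource_id", "SUSE": "cibadmin --query --scope resources"},
    {"name": "get_hana_resource_id_saphanasr_angi", "SUSE": "cibadmin --query --scope resources"},
]

env = Environment()
template = env.from_string(
    "{{ commands"
    " | selectattr('name','equalto','get_hana_resource_id_saphanasr_angi')"
    " | map(attribute=os_family)"
    " | first }}"
)

# In the playbook, os_family comes from (ansible_os_family | upper), e.g. "SUSE".
print(template.render(commands=commands, os_family="SUSE"))
# -> cibadmin --query --scope resources
```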
diff --git a/tests/modules/get_cluster_status_db_test.py b/tests/modules/get_cluster_status_db_test.py
index 86340430..b7c413fc 100644
--- a/tests/modules/get_cluster_status_db_test.py
+++ b/tests/modules/get_cluster_status_db_test.py
@@ -32,6 +32,7 @@ def hana_checker_classic(self):
             ansible_os_family=OperatingSystemFamily.REDHAT,
             saphanasr_provider=HanaSRProvider.SAPHANASR,
             db_instance_number="00",
+            hana_resource_name="rsc_SAPHanaCon_TEST_HDB00",
         )

     @pytest.fixture
@@ -47,6 +48,7 @@ def hana_checker_angi(self):
             ansible_os_family=OperatingSystemFamily.SUSE,
             saphanasr_provider=HanaSRProvider.ANGI,
             db_instance_number="00",
+            hana_resource_name="rsc_SAPHanaCon_TEST_HDB00",
         )

     def test_get_automation_register(self, mocker, hana_checker_classic):
diff --git a/tests/roles/ha_db_hana/block_network_test.py b/tests/roles/ha_db_hana/block_network_test.py
index 021854ec..8de1d413 100644
--- a/tests/roles/ha_db_hana/block_network_test.py
+++ b/tests/roles/ha_db_hana/block_network_test.py
@@ -32,6 +32,17 @@ def test_environment(self, ansible_inventory):
         :type: str
         """

+        commands = [
+            {
+                "name": "get_hana_resource_id",
+                "SUSE": "cibadmin --query --scope resources",
+            },
+            {
+                "name": "get_hana_resource_id_saphanasr_angi",
+                "SUSE": "cibadmin --query --scope resources",
+            },
+        ]
+
         task_counter_file = "/tmp/get_cluster_status_counter_block-network"
         if os.path.exists(task_counter_file):
             os.remove(task_counter_file)
@@ -51,6 +62,7 @@ def test_environment(self, ansible_inventory):
             "bin/nc",
             "bin/echo",
             "bin/sleep",
+            "bin/cibadmin",
             "bin/SAPHanaSR-manageProvider",
         ]

@@ -65,6 +77,7 @@ def test_environment(self, ansible_inventory):
                 "NFS_provider": "ANF",
                 "database_cluster_type": "ISCSI",
                 "sap_port_to_ping": "1128",
+                "commands": commands,
             },
         )

diff --git a/tests/roles/ha_db_hana/primary_node_ops_test.py b/tests/roles/ha_db_hana/primary_node_ops_test.py
index ec441c7d..a265c1d0 100644
--- a/tests/roles/ha_db_hana/primary_node_ops_test.py
+++ b/tests/roles/ha_db_hana/primary_node_ops_test.py
@@ -98,6 +98,17 @@ def test_environment(self, ansible_inventory, task_type):
         :ytype: str
         """

+        commands = [
+            {
+                "name": "get_hana_resource_id",
+                "SUSE": "cibadmin --query --scope resources",
+            },
+            {
+                "name": "get_hana_resource_id_saphanasr_angi",
+                "SUSE": "cibadmin --query --scope resources",
+            },
+        ]
+
         task_counter_file = f"/tmp/get_cluster_status_counter_{task_type['task_name']}"
         if os.path.exists(task_counter_file):
             os.remove(task_counter_file)
@@ -113,6 +124,7 @@ def test_environment(self, ansible_inventory, task_type):
             "bin/crm",
             "bin/echo",
             "bin/killall",
+            "bin/cibadmin",
             "bin/SAPHanaSR-manageProvider",
         ]

@@ -129,6 +141,7 @@ def test_environment(self, ansible_inventory, task_type):
                 "node_tier": "hana",
                 "NFS_provider": "ANF",
                 "database_cluster_type": "ISCSI",
+                "commands": commands,
             },
         )

diff --git a/tests/roles/ha_db_hana/secondary_node_ops_test.py b/tests/roles/ha_db_hana/secondary_node_ops_test.py
index 2ffe698f..c24d1558 100644
--- a/tests/roles/ha_db_hana/secondary_node_ops_test.py
+++ b/tests/roles/ha_db_hana/secondary_node_ops_test.py
@@ -69,6 +69,17 @@ def test_environment(self, ansible_inventory, task_type):
         :ytype: str
         """

+        commands = [
+            {
+                "name": "get_hana_resource_id",
+                "SUSE": "cibadmin --query --scope resources",
+            },
+            {
+                "name": "get_hana_resource_id_saphanasr_angi",
+                "SUSE": "cibadmin --query --scope resources",
+            },
+        ]
+
         task_counter_file = f"/tmp/get_cluster_status_counter_{task_type['task_name']}"
         if os.path.exists(task_counter_file):
             os.remove(task_counter_file)
@@ -87,9 +98,10 @@ def test_environment(self, ansible_inventory, task_type):
                 "bin/crm_resource",
                 "bin/echo",
                 "bin/killall",
+                "bin/cibadmin",
                 "bin/SAPHanaSR-manageProvider",
             ],
-            extra_vars_override={"node_tier": "hana"},
+            extra_vars_override={"node_tier": "hana", "commands": commands},
         )

         os.makedirs(f"{temp_dir}/bin", exist_ok=True)
diff --git a/tests/roles/mock_data/get_cluster_status_db.txt b/tests/roles/mock_data/get_cluster_status_db.txt
index 2658cf76..24179b7f 100644
--- a/tests/roles/mock_data/get_cluster_status_db.txt
+++ b/tests/roles/mock_data/get_cluster_status_db.txt
@@ -11,6 +11,7 @@ def main():
             database_sid=dict(type="str", required=True),
             saphanasr_provider=dict(type="str", required=True),
             db_instance_number=dict(type="str", required=True),
+            hana_resource_name=dict(type="str", default="")
         )
     )
diff --git a/tests/roles/mock_data/secondary_get_cluster_status_db.txt b/tests/roles/mock_data/secondary_get_cluster_status_db.txt
index afbec0a7..131c717f 100644
--- a/tests/roles/mock_data/secondary_get_cluster_status_db.txt
+++ b/tests/roles/mock_data/secondary_get_cluster_status_db.txt
@@ -10,7 +10,8 @@ def main():
             operation_step=dict(type="str", required=True),
             database_sid=dict(type="str", required=True),
             saphanasr_provider=dict(type="str", required=True),
-            db_instance_number=dict(type="str", required=True)
+            db_instance_number=dict(type="str", required=True),
+            hana_resource_name=dict(type="str", default="")
         )
     )

From fc32782726e6c96d1156b28bfae9263a27e171e6 Mon Sep 17 00:00:00 2001
From: Devansh Jain <86314060+devanshjainms@users.noreply.github.com>
Date: Fri, 11 Jul 2025 09:45:22 -0700
Subject: [PATCH 5/6] Add a rescue operation to handle network-related failures (#96)

---
 docs/HIGH_AVAILABILITY.md                    |  2 +-
 requirements.txt                             | 14 +++++++-------
 scripts/sap_automation_qa.sh                 | 15 +++++++++++----
 src/module_utils/get_pcmk_properties.py      |  2 ++
 src/modules/get_pcmk_properties_scs.py       |  2 +-
 src/roles/ha_db_hana/tasks/block-network.yml | 11 +++++++++++
 src/roles/ha_scs/tasks/files/constants.yaml  |  2 +-
 7 files changed, 34 insertions(+), 14 deletions(-)

diff --git a/docs/HIGH_AVAILABILITY.md b/docs/HIGH_AVAILABILITY.md
index 33c5309e..111c45cf 100644
--- a/docs/HIGH_AVAILABILITY.md
+++ b/docs/HIGH_AVAILABILITY.md
@@ -249,7 +249,7 @@ db_sid: "your-db-sid"

 # Boolean indicating if the SCS and database is configured as highly available.
 scs_high_availability: true
-db_high_availability: true
+database_high_availability: true

 # The high availability configuration of the SCS and DB instance. Supported values are:
 # - AFA (for Azure Fencing Agent)
diff --git a/requirements.txt b/requirements.txt
index 39c52772..26cabe3c 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -23,7 +23,7 @@ attrs==25.3.0
     #   referencing
 azure-common==1.1.28
     # via azure-mgmt-network
-azure-core==1.34.0
+azure-core==1.35.0
     # via
     #   azure-identity
     #   azure-kusto-data
@@ -40,7 +40,7 @@ azure-kusto-data==5.0.4
     #   azure-kusto-ingest
 azure-kusto-ingest==5.0.4
     # via -r requirements.in
-azure-mgmt-core==1.5.0
+azure-mgmt-core==1.6.0
     # via azure-mgmt-network
 azure-mgmt-network==29.0.0
     # via -r requirements.in
@@ -58,7 +58,7 @@ black==25.1.0
     #   ansible-lint
 bracex==2.6
     # via wcmatch
-certifi==2025.6.15
+certifi==2025.7.9
     # via requests
 cffi==1.17.1
     # via cryptography
@@ -68,11 +68,11 @@ click==8.2.1
     # via
     #   -r requirements.in
     #   black
-coverage[toml]==7.9.1
+coverage[toml]==7.9.2
     # via
     #   -r requirements.in
     #   pytest-cov
-cryptography==45.0.4
+cryptography==45.0.5
     # via
     #   ansible-core
     #   azure-identity
@@ -144,7 +144,7 @@ packaging==25.0
     #   ansible-runner
     #   black
     #   pytest
-pandas==2.3.0
+pandas==2.3.1
     # via -r requirements.in
 pathspec==0.12.1
     # via
@@ -241,7 +241,7 @@ tomli==2.2.1
     #   pytest
 tomlkit==0.13.3
     # via pylint
-typing-extensions==4.14.0
+typing-extensions==4.14.1
     # via
     #   astroid
     #   azure-core
diff --git a/scripts/sap_automation_qa.sh b/scripts/sap_automation_qa.sh
index 7f0aa08c..dd57120f 100755
--- a/scripts/sap_automation_qa.sh
+++ b/scripts/sap_automation_qa.sh
@@ -301,7 +301,9 @@ run_ansible_playbook() {
         local filtered_config
         filtered_config=$(get_filtered_test_config)
         if [[ -n "$filtered_config" ]]; then
-            extra_vars="--extra-vars '$filtered_config'"
+            local temp_config_file=$(mktemp)
+            echo "$filtered_config" > "$temp_config_file"
+            extra_vars="--extra-vars @$temp_config_file"
         fi
     fi

@@ -384,14 +386,14 @@ run_ansible_playbook() {
         check_file_exists "$temp_file" \
             "Temporary password file not found. Please check the Key Vault secret ID."
         command="ansible-playbook ${cmd_dir}/../src/$playbook_name.yml -i $system_hosts \
-            --extra-vars \"ansible_ssh_pass=$(cat $temp_file)\" --extra-vars @$VARS_FILE -e @$system_params \
+            --extra-vars 'ansible_ssh_pass=$(cat $temp_file)' --extra-vars @$VARS_FILE -e @$system_params \
             -e '_workspace_directory=$system_config_folder' $extra_vars"
     else
         local password_file="${cmd_dir}/../WORKSPACES/SYSTEM/$SYSTEM_CONFIG_NAME/password"
         check_file_exists "$password_file" \
             "password file not found in WORKSPACES/SYSTEM/$SYSTEM_CONFIG_NAME directory."
         command="ansible-playbook ${cmd_dir}/../src/$playbook_name.yml -i $system_hosts \
-            --extra-vars \"ansible_ssh_pass=$(cat $password_file)\" --extra-vars @$VARS_FILE -e @$system_params \
+            --extra-vars 'ansible_ssh_pass=$(cat $password_file)' --extra-vars @$VARS_FILE -e @$system_params \
             -e '_workspace_directory=$system_config_folder' $extra_vars"
     fi

@@ -411,11 +413,16 @@ run_ansible_playbook() {
     return_code=$?
log "INFO" "Ansible playbook execution completed with return code: $return_code" - # Clean up temporary file if it exists + # Clean up temporary files if they exist if [[ -n "$temp_file" && -f "$temp_file" ]]; then rm -f "$temp_file" log "INFO" "Temporary file deleted: $temp_file" fi + + if [[ -n "$temp_config_file" && -f "$temp_config_file" ]]; then + rm -f "$temp_config_file" + log "INFO" "Temporary config file deleted: $temp_config_file" + fi exit $return_code } diff --git a/src/module_utils/get_pcmk_properties.py b/src/module_utils/get_pcmk_properties.py index 733ce134..2b90c7e1 100644 --- a/src/module_utils/get_pcmk_properties.py +++ b/src/module_utils/get_pcmk_properties.py @@ -165,6 +165,8 @@ def _create_parameter( if isinstance(expected_value, list): expected_value = expected_value[0] if expected_value else "" + elif isinstance(expected_value, dict): + expected_value = list(expected_value.values())[0] if expected_value else "" return Parameters( category=f"{category}_{subcategory}" if subcategory else category, diff --git a/src/modules/get_pcmk_properties_scs.py b/src/modules/get_pcmk_properties_scs.py index 304f2005..8b3d95d5 100644 --- a/src/modules/get_pcmk_properties_scs.py +++ b/src/modules/get_pcmk_properties_scs.py @@ -324,7 +324,7 @@ def main() -> None: sid=module.params["sid"], scs_instance_number=module.params["ascs_instance_number"], ers_instance_number=module.params["ers_instance_number"], - os_type=OperatingSystemFamily(os_family), + os_type=OperatingSystemFamily(os_family.upper()), virtual_machine_name=module.params["virtual_machine_name"], constants=module.params["pcmk_constants"], fencing_mechanism=module.params["fencing_mechanism"], diff --git a/src/roles/ha_db_hana/tasks/block-network.yml b/src/roles/ha_db_hana/tasks/block-network.yml index 6f9688b5..4ed49b67 100644 --- a/src/roles/ha_db_hana/tasks/block-network.yml +++ b/src/roles/ha_db_hana/tasks/block-network.yml @@ -203,6 +203,17 @@ ansible.builtin.include_tasks: "roles/misc/tasks/post-validations.yml" rescue: + - name: "Test Execution Failure: Remove the firewall rule on primary node." 
diff --git a/src/module_utils/get_pcmk_properties.py b/src/module_utils/get_pcmk_properties.py
index 733ce134..2b90c7e1 100644
--- a/src/module_utils/get_pcmk_properties.py
+++ b/src/module_utils/get_pcmk_properties.py
@@ -165,6 +165,8 @@ def _create_parameter(

         if isinstance(expected_value, list):
             expected_value = expected_value[0] if expected_value else ""
+        elif isinstance(expected_value, dict):
+            expected_value = list(expected_value.values())[0] if expected_value else ""

         return Parameters(
             category=f"{category}_{subcategory}" if subcategory else category,
diff --git a/src/modules/get_pcmk_properties_scs.py b/src/modules/get_pcmk_properties_scs.py
index 304f2005..8b3d95d5 100644
--- a/src/modules/get_pcmk_properties_scs.py
+++ b/src/modules/get_pcmk_properties_scs.py
@@ -324,7 +324,7 @@ def main() -> None:
         sid=module.params["sid"],
         scs_instance_number=module.params["ascs_instance_number"],
         ers_instance_number=module.params["ers_instance_number"],
-        os_type=OperatingSystemFamily(os_family),
+        os_type=OperatingSystemFamily(os_family.upper()),
         virtual_machine_name=module.params["virtual_machine_name"],
         constants=module.params["pcmk_constants"],
         fencing_mechanism=module.params["fencing_mechanism"],
diff --git a/src/roles/ha_db_hana/tasks/block-network.yml b/src/roles/ha_db_hana/tasks/block-network.yml
index 6f9688b5..4ed49b67 100644
--- a/src/roles/ha_db_hana/tasks/block-network.yml
+++ b/src/roles/ha_db_hana/tasks/block-network.yml
@@ -203,6 +203,17 @@
         ansible.builtin.include_tasks: "roles/misc/tasks/post-validations.yml"

   rescue:
+    - name: "Test Execution Failure: Remove the firewall rule on primary node."
+      become: true
+      ansible.builtin.shell: |
+        iptables -D INPUT -s {{ secondary_node_ip }} -j DROP;
+        iptables -D OUTPUT -d {{ secondary_node_ip }} -j DROP
+      register: firewall_rule_deleted
+      changed_when: firewall_rule_deleted.rc == 0
+      failed_when: false
+      when: ansible_hostname == cluster_status_pre.primary_node and
+        secondary_node_ip is defined
+
     - name: "Rescue operation"
       ansible.builtin.include_tasks: "roles/misc/tasks/rescue.yml"

diff --git a/src/roles/ha_scs/tasks/files/constants.yaml b/src/roles/ha_scs/tasks/files/constants.yaml
index 48d8aa0a..a50da29b 100644
--- a/src/roles/ha_scs/tasks/files/constants.yaml
+++ b/src/roles/ha_scs/tasks/files/constants.yaml
@@ -334,7 +334,7 @@ OS_PARAMETERS:
   DEFAULTS:
     sysctl:
       net.ipv4.tcp_timestamps: "net.ipv4.tcp_timestamps = 0"
-      vm.swappiness: "vm.swappiness = 60"
+      vm.swappiness: "vm.swappiness = 10"
     corosync-cmapctl:
       runtime.config.totem.token: "runtime.config.totem.token (u32) = 30000"
       runtime.config.totem.consensus: "runtime.config.totem.consensus (u32) = 36000"

From d4ee27cb84d32b6e55034528d4fbfb7dd55bb3f4 Mon Sep 17 00:00:00 2001
From: Devansh Jain <86314060+devanshjainms@users.noreply.github.com>
Date: Fri, 8 Aug 2025 09:11:57 -0700
Subject: [PATCH 6/6] Update package versions and script documentation (#100)

---
 .github/workflows/codeql.yml                |  6 ++--
 .github/workflows/ossf-scoreboard.yml       |  2 +-
 .github/workflows/trivy.yml                 |  2 +-
 requirements.txt                            | 18 +++++-----
 scripts/sap_automation_qa.sh                | 34 ++++++++++++++++++-
 src/module_utils/get_pcmk_properties.py     | 10 +++++-
 .../ha_db_hana/tasks/files/constants.yaml   |  2 +-
 src/roles/ha_scs/tasks/files/constants.yaml |  2 +-
 8 files changed, 58 insertions(+), 18 deletions(-)

diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
index 87decb17..5a2e8a50 100644
--- a/.github/workflows/codeql.yml
+++ b/.github/workflows/codeql.yml
@@ -46,14 +46,14 @@ jobs:
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

       - name: Initialize CodeQL
-        uses: github/codeql-action/init@28deaeda66b76a05916b6923827895f2b14ab387 # v3.28.16
+        uses: github/codeql-action/init@07bb2b932c90fc1ec97637495e4072a0966fa74c # v3.28.20
         with:
           languages: ${{ matrix.language }}

       - name: Autobuild
-        uses: github/codeql-action/autobuild@28deaeda66b76a05916b6923827895f2b14ab387 # v3.28.16
+        uses: github/codeql-action/autobuild@07bb2b932c90fc1ec97637495e4072a0966fa74c # v3.28.20

       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@28deaeda66b76a05916b6923827895f2b14ab387 # v3.28.16
+        uses: github/codeql-action/analyze@07bb2b932c90fc1ec97637495e4072a0966fa74c # v3.28.20
         with:
           category: "/language:${{matrix.language}}"
diff --git a/.github/workflows/ossf-scoreboard.yml b/.github/workflows/ossf-scoreboard.yml
index ddc3895d..a6e09be7 100644
--- a/.github/workflows/ossf-scoreboard.yml
+++ b/.github/workflows/ossf-scoreboard.yml
@@ -50,6 +50,6 @@ jobs:
           retention-days: 5

       - name: "Upload to code-scanning"
-        uses: github/codeql-action/upload-sarif@28deaeda66b76a05916b6923827895f2b14ab387 # v3.28.16
+        uses: github/codeql-action/upload-sarif@07bb2b932c90fc1ec97637495e4072a0966fa74c # v3.28.20
         with:
           sarif_file: results.sarif
diff --git a/.github/workflows/trivy.yml b/.github/workflows/trivy.yml
index f28eeb98..0ed1d400 100644
--- a/.github/workflows/trivy.yml
+++ b/.github/workflows/trivy.yml
@@ -36,7 +36,7 @@ jobs:
           output: report-fs.sarif

       - name: Upload Trivy report (fs) GitHub Security
-        uses: github/codeql-action/upload-sarif@28deaeda66b76a05916b6923827895f2b14ab387 # v3.28.16
+        uses: github/codeql-action/upload-sarif@07bb2b932c90fc1ec97637495e4072a0966fa74c # v3.28.20
         with:
           sarif_file: report-fs.sarif
           category: 'fs'
diff --git a/requirements.txt b/requirements.txt
index 26cabe3c..3a5f1874 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -6,7 +6,7 @@
 #
 ansible-compat==25.6.0
     # via ansible-lint
-ansible-core==2.17.12
+ansible-core==2.17.13
     # via
     #   -r requirements.in
     #   ansible-compat
@@ -15,7 +15,7 @@ ansible-lint==25.6.1
     # via -r requirements.in
 ansible-runner==2.4.1
     # via -r requirements.in
-astroid==3.3.10
+astroid==3.3.11
     # via pylint
 attrs==25.3.0
     # via
@@ -30,15 +30,15 @@ azure-core==1.35.0
     #   azure-mgmt-core
     #   azure-storage-blob
     #   azure-storage-queue
-azure-identity==1.23.0
+azure-identity==1.23.1
     # via
     #   -r requirements.in
     #   azure-kusto-data
-azure-kusto-data==5.0.4
+azure-kusto-data==5.0.5
     # via
     #   -r requirements.in
     #   azure-kusto-ingest
-azure-kusto-ingest==5.0.4
+azure-kusto-ingest==5.0.5
     # via -r requirements.in
 azure-mgmt-core==1.6.0
     # via azure-mgmt-network
@@ -58,7 +58,7 @@ black==25.1.0
     #   ansible-lint
 bracex==2.6
     # via wcmatch
-certifi==2025.7.9
+certifi==2025.7.14
     # via requests
 cffi==1.17.1
     # via cryptography
@@ -68,7 +68,7 @@ click==8.2.1
     # via
     #   -r requirements.in
     #   black
-coverage[toml]==7.9.2
+coverage[toml]==7.10.0
     # via
     #   -r requirements.in
     #   pytest-cov
@@ -107,7 +107,7 @@ jinja2==3.1.6
     #   ansible-core
 jmespath==1.0.1
     # via -r requirements.in
-jsonschema==4.24.0
+jsonschema==4.25.0
     # via
     #   ansible-compat
     #   ansible-lint
@@ -123,7 +123,7 @@ mccabe==0.7.0
     # via pylint
 mdurl==0.1.2
     # via markdown-it-py
-msal==1.32.3
+msal==1.33.0
     # via
     #   azure-identity
     #   azure-kusto-data
diff --git a/scripts/sap_automation_qa.sh b/scripts/sap_automation_qa.sh
index dd57120f..6d0a0bd8 100755
--- a/scripts/sap_automation_qa.sh
+++ b/scripts/sap_automation_qa.sh
@@ -84,7 +84,11 @@ Options:
   --test_groups=GROUP          Specify test group to run (e.g., HA_DB_HANA, HA_SCS)
   --test_cases=[case1,case2]   Specify specific test cases to run (comma-separated, in brackets)
   --extra-vars=VAR             Specify additional Ansible extra variables (e.g., --extra-vars='{"key":"value"}')
-  --offline                    Run offline test cases using previously collected CIB data
+  --offline                    Run offline test cases using previously collected CIB data.
+                               While running offline tests, the script will look for CIB data in
+                               WORKSPACES/SYSTEM//offline_validation directory.
+                               Extra vars "ansible_os_family" required for offline mode
+                               (e.g., --extra-vars='{"ansible_os_family":"SUSE"}')
   -h, --help                   Show this help message

 Examples:
@@ -94,6 +98,34 @@ Examples:
   $0 --test_groups=HA_DB_HANA --test_cases=[ha-config,primary-node-crash] --extra-vars='{"key":"value"}'
   $0 --test_groups=HA_DB_HANA --test_cases=[ha-config] --offline

+Available Test Cases for groups:
+  $0 --test_groups=HA_DB_HANA
+    ha-config             => High Availability configuration
+    azure-lb              => Azure Load Balancer
+    resource-migration    => Resource Migration
+    primary-node-crash    => Primary Node Crash
+    block-network         => Block Network
+    primary-crash-index   => Primary Crash Index
+    primary-node-kill     => Primary Node Kill
+    primary-echo-b        => Primary Echo B
+    secondary-node-kill   => Secondary Node Kill
+    secondary-echo-b      => Secondary Echo B
+    fs-freeze             => FS Freeze
+    sbd-fencing           => SBD Fencing
+    secondary-crash-index => Secondary Crash Index
+  $0 --test_groups=HA_SCS
+    ha-config                => High Availability configuration
+    azure-lb                 => Azure Load Balancer
+    sapcontrol-config        => SAP Control Configuration
+    ascs-migration           => ASCS Migration
+    block-network            => Block Network
+    kill-message-server      => Kill Message Server
+    kill-enqueue-server      => Kill Enqueue Server
+    kill-enqueue-replication => Kill Enqueue Replication
+    kill-sapstartsrv-process => Kill SAP Start Service Process
+    manual-restart           => Manual Restart
+    ha-failover-to-node      => HA Failover to Secondary Node
+
 Configuration is read from vars.yaml file.
 EOF
 }
diff --git a/src/module_utils/get_pcmk_properties.py b/src/module_utils/get_pcmk_properties.py
index 2b90c7e1..dc796165 100644
--- a/src/module_utils/get_pcmk_properties.py
+++ b/src/module_utils/get_pcmk_properties.py
@@ -166,7 +166,15 @@ def _create_parameter(
         if isinstance(expected_value, list):
             expected_value = expected_value[0] if expected_value else ""
         elif isinstance(expected_value, dict):
-            expected_value = list(expected_value.values())[0] if expected_value else ""
+            expected_value = (
+                [
+                    item
+                    for val in expected_value.values()
+                    for item in (val if isinstance(val, list) else [val])
+                ]
+                if expected_value
+                else ""
+            )

         return Parameters(
             category=f"{category}_{subcategory}" if subcategory else category,
diff --git a/src/roles/ha_db_hana/tasks/files/constants.yaml b/src/roles/ha_db_hana/tasks/files/constants.yaml
index 9d8a8380..fa103cdf 100644
--- a/src/roles/ha_db_hana/tasks/files/constants.yaml
+++ b/src/roles/ha_db_hana/tasks/files/constants.yaml
@@ -50,7 +50,7 @@ VALID_CONFIGS:
       stonith-timeout: ["900s", "900"]
     ISCSI:
       have-watchdog: "true"
-      stonith-timeout: ["144", "144s"]
+      stonith-timeout: ["210", "210s"]

 # === Resource Defaults ===
diff --git a/src/roles/ha_scs/tasks/files/constants.yaml b/src/roles/ha_scs/tasks/files/constants.yaml
index a50da29b..8fec70fc 100644
--- a/src/roles/ha_scs/tasks/files/constants.yaml
+++ b/src/roles/ha_scs/tasks/files/constants.yaml
@@ -58,7 +58,7 @@ VALID_CONFIGS:
       stonith-timeout: ["900", "900s"]
     ISCSI:
       have-watchdog: "true"
-      stonith-timeout: ["144", "144s"]
+      stonith-timeout: ["210", "210s"]

 # === Resource Defaults ===
 # cibadmin --query --scope resources
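Note: the `_create_parameter` change in this last patch flattens a dict whose values may be scalars or lists (such as the `stonith-timeout` lists in the constants above) into one flat list, instead of keeping only the first value. A standalone sketch of that expression with hypothetical sample data:

```python
"""Sketch of the expected-value flattening added to _create_parameter.
The sample dict below is hypothetical, shaped like the VALID_CONFIGS entries."""

expected_value = {
    "AFA": ["900s", "900"],   # list value: each item is kept
    "ISCSI": "210s",          # scalar value: wrapped into a one-item list
}

flattened = (
    [
        item
        for val in expected_value.values()
        for item in (val if isinstance(val, list) else [val])
    ]
    if expected_value
    else ""
)

print(flattened)  # -> ['900s', '900', '210s']
```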