diff --git a/inventory/group_vars/docker_stack_control.yml b/inventory/group_vars/docker_stack_control.yml index e3237ceda..9cf767feb 100644 --- a/inventory/group_vars/docker_stack_control.yml +++ b/inventory/group_vars/docker_stack_control.yml @@ -158,6 +158,9 @@ kickstart_tftpdir: "{{ pxe_config_dir }}/menus" # hour: "*/24" # job: "cd {{ docker_stack__dir }} && /usr/local/bin/docker-compose restart jenkins" +## ansible-template-ui +docker_stack__ansibletemplateui__cred_docker_registry_admin_username: "{{ vault__docker_registry_username }}" +docker_stack__ansibletemplateui__cred_docker_registry_admin_password: "{{ vault__docker_registry_password }}" #cobbler_firewalld_ports: firewalld_ports__cobbler: diff --git a/inventory/group_vars/docker_stack_jenkins_controller.yml b/inventory/group_vars/docker_stack_jenkins_controller.yml index 9dfbf1e65..33e04c470 100644 --- a/inventory/group_vars/docker_stack_jenkins_controller.yml +++ b/inventory/group_vars/docker_stack_jenkins_controller.yml @@ -61,28 +61,28 @@ docker_stack__jenkins__ssh_port: 4444 ####################### ## config-as-code creds -docker_stack__jenkins__cred_jenkins_admin_user_username: "{{ vault__jenkins_cred_jenkins_admin_user_username }}" -docker_stack__jenkins__cred_jenkins_admin_user_password: "{{ vault__jenkins_cred_jenkins_admin_user_password }}" -docker_stack__jenkins__cred_jenkins_git_user_password: "{{ vault__jenkins_cred_jenkins_git_user_password }}" +docker_stack__jenkins__cred_jenkins_admin_user_username: "{{ vault__jenkins_admin_user_username }}" +docker_stack__jenkins__cred_jenkins_admin_user_password: "{{ vault__jenkins_admin_user_password }}" +docker_stack__jenkins__cred_jenkins_git_user_password: "{{ vault__jenkins_git_user_password }}" docker_stack__jenkins__cred_ansible_vault_password: "{{ vault__ansible_vault_password }}" -docker_stack__jenkins__cred_ansible_ssh_key: "{{ vault__jenkins_cred_ansible_ssh_key }}" +docker_stack__jenkins__cred_ansible_ssh_key: "{{ vault__ansible_ssh_key }}" -docker_stack__jenkins__cred_ansible_ssh_username: "{{ vault__jenkins_cred_ansible_ssh_username }}" -docker_stack__jenkins__cred_ansible_ssh_password: "{{ vault__jenkins_cred_ansible_ssh_password }}" +docker_stack__jenkins__cred_ansible_ssh_username: "{{ vault__ansible_ssh_username }}" +docker_stack__jenkins__cred_ansible_ssh_password: "{{ vault__ansible_ssh_password }}" -docker_stack__jenkins__cred_vsphere_username: "{{ vault__jenkins_cred_vsphere_username }}" -docker_stack__jenkins__cred_vsphere_password: "{{ vault__jenkins_cred_vsphere_password }}" +docker_stack__jenkins__cred_vsphere_username: "{{ vault__vsphere_username }}" +docker_stack__jenkins__cred_vsphere_password: "{{ vault__vsphere_password }}" docker_stack__jenkins__cred_esxi_password: "{{ vault__esxi_password }}" -docker_stack__jenkins__cred_bitbucket_ssh_username: "{{ vault__jenkins_cred_bitbucket_ssh_username }}" -docker_stack__jenkins__cred_bitbucket_ssh_private_key: "{{ vault__jenkins_cred_bitbucket_ssh_private_key }}" +docker_stack__jenkins__cred_bitbucket_ssh_username: "{{ vault__bitbucket_ssh_username }}" +docker_stack__jenkins__cred_bitbucket_ssh_private_key: "{{ vault__bitbucket_ssh_private_key }}" #docker_stack__jenkins__cred_packer_ssh_password: "{{ vault__packer_user_password }}" docker_stack__jenkins__cred_packer_user_password: "{{ vault__packer_user_password }}" docker_stack__jenkins__cred_vm_root_password: "{{ vault__sha512_hashed_root_password }}" -docker_stack__jenkins__cred_github_username: "{{ vault__jenkins_cred_github_username }}" 
-docker_stack__jenkins__cred_github_password: "{{ vault__jenkins_cred_github_password }}" +docker_stack__jenkins__cred_github_username: "{{ vault__github_username }}" +docker_stack__jenkins__cred_github_password: "{{ vault__github_password }}" -docker_stack__jenkins__cred_docker_registry_admin_username: "{{ vault__jenkins_cred_docker_registry_admin_username }}" -docker_stack__jenkins__cred_docker_registry_admin_password: "{{ vault__jenkins_cred_docker_registry_admin_password }}" +docker_stack__jenkins__cred_docker_registry_admin_username: "{{ vault__docker_registry_admin_username }}" +docker_stack__jenkins__cred_docker_registry_admin_password: "{{ vault__docker_registry_admin_password }}" docker_stack__jenkins__cred_bitbucket_cloud_oauth_key: "{{ vault__jenkins_bitbucket_cloud_oauth_key }}" docker_stack__jenkins__cred_bitbucket_cloud_oauth_token: "{{ vault__jenkins_bitbucket_cloud_oauth_token }}" diff --git a/inventory/group_vars/docker_stack_jenkins_jcac.yml b/inventory/group_vars/docker_stack_jenkins_jcac.yml index e3f295e8d..8a1037f3f 100644 --- a/inventory/group_vars/docker_stack_jenkins_jcac.yml +++ b/inventory/group_vars/docker_stack_jenkins_jcac.yml @@ -53,28 +53,28 @@ docker_stack__jenkins_jcac__ssh_public_key: "{{ vault__git_ssh_public_keyfile }} ####################### ## config-as-code creds -docker_stack__jenkins_jcac__cred_jenkins_admin_user_username: "{{ vault__jenkins_cred_jenkins_admin_user_username }}" -docker_stack__jenkins_jcac__cred_jenkins_admin_user_password: "{{ vault__jenkins_cred_jenkins_admin_user_password }}" -docker_stack__jenkins_jcac__cred_jenkins_git_user_password: "{{ vault__jenkins_cred_jenkins_git_user_password }}" +docker_stack__jenkins_jcac__cred_jenkins_admin_user_username: "{{ vault__jenkins_admin_user_username }}" +docker_stack__jenkins_jcac__cred_jenkins_admin_user_password: "{{ vault__jenkins_admin_user_password }}" +docker_stack__jenkins_jcac__cred_jenkins_git_user_password: "{{ vault__jenkins_git_user_password }}" docker_stack__jenkins_jcac__cred_ansible_vault_password: "{{ vault__ansible_vault_password }}" -docker_stack__jenkins_jcac__cred_ansible_ssh_key: "{{ vault__jenkins_cred_ansible_ssh_key }}" +docker_stack__jenkins_jcac__cred_ansible_ssh_key: "{{ vault__ansible_ssh_key }}" -docker_stack__jenkins_jcac__cred_ansible_ssh_username: "{{ vault__jenkins_cred_ansible_ssh_username }}" -docker_stack__jenkins_jcac__cred_ansible_ssh_password: "{{ vault__jenkins_cred_ansible_ssh_password }}" +docker_stack__jenkins_jcac__cred_ansible_ssh_username: "{{ vault__ansible_ssh_username }}" +docker_stack__jenkins_jcac__cred_ansible_ssh_password: "{{ vault__ansible_ssh_password }}" -docker_stack__jenkins_jcac__cred_vsphere_username: "{{ vault__jenkins_cred_vsphere_username }}" -docker_stack__jenkins_jcac__cred_vsphere_password: "{{ vault__jenkins_cred_vsphere_password }}" +docker_stack__jenkins_jcac__cred_vsphere_username: "{{ vault__vsphere_username }}" +docker_stack__jenkins_jcac__cred_vsphere_password: "{{ vault__vsphere_password }}" docker_stack__jenkins_jcac__cred_esxi_password: "{{ vault__esxi_password }}" -docker_stack__jenkins_jcac__cred_bitbucket_ssh_username: "{{ vault__jenkins_cred_bitbucket_ssh_username }}" -docker_stack__jenkins_jcac__cred_bitbucket_ssh_private_key: "{{ vault__jenkins_cred_bitbucket_ssh_private_key }}" +docker_stack__jenkins_jcac__cred_bitbucket_ssh_username: "{{ vault__bitbucket_ssh_username }}" +docker_stack__jenkins_jcac__cred_bitbucket_ssh_private_key: "{{ vault__bitbucket_ssh_private_key }}" 
#docker_stack__jenkins_jcac__cred_packer_ssh_password: "{{ vault__packer_user_password }}" docker_stack__jenkins_jcac__cred_packer_user_password: "{{ vault__packer_user_password }}" docker_stack__jenkins_jcac__cred_vm_root_password: "{{ vault__sha512_hashed_root_password }}" -docker_stack__jenkins_jcac__cred_github_username: "{{ vault__jenkins_cred_github_username }}" -docker_stack__jenkins_jcac__cred_github_password: "{{ vault__jenkins_cred_github_password }}" +docker_stack__jenkins_jcac__cred_github_username: "{{ vault__github_username }}" +docker_stack__jenkins_jcac__cred_github_password: "{{ vault__github_password }}" -docker_stack__jenkins_jcac__cred_docker_registry_admin_username: "{{ vault__jenkins_cred_docker_registry_admin_username }}" -docker_stack__jenkins_jcac__cred_docker_registry_admin_password: "{{ vault__jenkins_cred_docker_registry_admin_password }}" +docker_stack__jenkins_jcac__cred_docker_registry_admin_username: "{{ vault__docker_registry_username }}" +docker_stack__jenkins_jcac__cred_docker_registry_admin_password: "{{ vault__docker_registry_password }}" docker_stack__jenkins_jcac__cred_bitbucket_cloud_oauth_key: "{{ vault__jenkins_bitbucket_cloud_oauth_key }}" docker_stack__jenkins_jcac__cred_bitbucket_cloud_oauth_token: "{{ vault__jenkins_bitbucket_cloud_oauth_token }}" diff --git a/inventory/run-inventory-tests.sh b/inventory/run-inventory-tests.sh index 0987aa1b3..844601b08 100755 --- a/inventory/run-inventory-tests.sh +++ b/inventory/run-inventory-tests.sh @@ -1,6 +1,6 @@ #!/usr/bin/env bash -VERSION="2024.2.1" +VERSION="2024.5.1" #SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" SCRIPT_DIR="$(dirname "$0")" @@ -10,10 +10,8 @@ SCRIPT_DIR="$(dirname "$0")" PROJECT_DIR="$(cd "${SCRIPT_DIR}" && git rev-parse --show-toplevel)" INVENTORY_DIR="${PROJECT_DIR}/inventory" -#KEEP_TMP=1 - +KEEP_TMP=0 RUN_PYTEST=0 -LIST_TEST_CASES=0 ENSURE_PYTHON_MODULES=0 PYTEST_JUNIT_REPORT_DEFAULT=".test-results/junit-report.xml" @@ -658,7 +656,7 @@ function ensure_tool() { esac logDebug "${LOG_PREFIX} installing executable '${executable}'" - eval "${install_function} ${OS}" + eval "${install_function} ${PLATFORM_OS}" fi } @@ -673,6 +671,7 @@ function usage() { echo " -p : run pytest" echo " -r [PYTEST_JUNIT_REPORT] : use specified junitxml path for pytest report" echo " -v : show script version" + echo " -k : keep temp directory/files" echo " -h : help" echo " [TEST_CASES]" echo "" @@ -681,6 +680,7 @@ function usage() { echo " ${0} -l" echo " ${0} 01" echo " ${0} validate_file_extensions" + echo " ${0} -k -L DEBUG validate_yml_sortorder" echo " ${0} 01 03" echo " ${0} -L DEBUG 02 04" echo " ${0} -p" @@ -695,12 +695,13 @@ function main() { checkRequiredCommands ansible-inventory yamllint - while getopts "L:r:dlpvh" opt; do + while getopts "L:r:dlpvhk" opt; do case "${opt}" in L) setLogLevel "${OPTARG}" ;; + l) print_test_cases && exit ;; r) PYTEST_JUNIT_REPORT="${OPTARG}" ;; d) DISPLAY_TEST_RESULTS=1 ;; - l) LIST_TEST_CASES=1 ;; + k) KEEP_TMP=1 ;; p) RUN_PYTEST=1 ;; v) echo "${VERSION}" && exit ;; h) usage 1 ;; @@ -713,10 +714,6 @@ function main() { if [[ -n "${PYTEST_JUNIT_REPORT-}" ]]; then PYTEST_JUNIT_REPORT="${PYTEST_JUNIT_REPORT_DEFAULT}" fi - if [ "${LIST_TEST_CASES}" -eq 1 ]; then - print_test_cases - exit - fi ## ref: https://pypi.org/project/yq/ logDebug "Ensure jq present/installed (required for yq sort-keys)" diff --git a/roles/ansible-ping-test/tasks/main.yml b/roles/ansible-ping-test/tasks/main.yml index 6d6984853..70c55b7f0 100644 --- 
a/roles/ansible-ping-test/tasks/main.yml +++ b/roles/ansible-ping-test/tasks/main.yml @@ -63,7 +63,7 @@ - name: ping test via CLI delegate_to: localhost become: yes - command: "ping -c 1 -w 2 {{ ansible_host }}" + ansible.builtin.command: "ping -c 1 -w 2 {{ ansible_host }}" register: ping_cli_test changed_when: no diff --git a/roles/ansible-role-bind/tasks/main.yml b/roles/ansible-role-bind/tasks/main.yml index d279a05ee..5b3b8ef98 100644 --- a/roles/ansible-role-bind/tasks/main.yml +++ b/roles/ansible-role-bind/tasks/main.yml @@ -48,7 +48,7 @@ tags: bind - name: Create serial, based on UTC UNIX time - command: date -u +%s + ansible.builtin.command: date -u +%s register: timestamp changed_when: false run_once: true diff --git a/roles/ansible-role-dhcp/tasks/apparmor-fix.yml b/roles/ansible-role-dhcp/tasks/apparmor-fix.yml index 172312312..c85b155cd 100644 --- a/roles/ansible-role-dhcp/tasks/apparmor-fix.yml +++ b/roles/ansible-role-dhcp/tasks/apparmor-fix.yml @@ -35,4 +35,4 @@ tags: dhcp - name: AppArmor fix | Force running handlers now - meta: flush_handlers + ansible.builtin.meta: flush_handlers diff --git a/roles/ansible-role-pdc/tasks/main.yml b/roles/ansible-role-pdc/tasks/main.yml index 3c8022531..9553b89e1 100644 --- a/roles/ansible-role-pdc/tasks/main.yml +++ b/roles/ansible-role-pdc/tasks/main.yml @@ -52,7 +52,7 @@ when: not ansible_windows_domain_member - name: Force all notified handlers to run at this point, not waiting for normal sync points - meta: flush_handlers + ansible.builtin.meta: flush_handlers - name: Ensure a Domain Controller is available in the domain win_dsc: diff --git a/roles/awx-docker/tasks/build_image.yml b/roles/awx-docker/tasks/build_image.yml index 8339f187f..04ab405aa 100644 --- a/roles/awx-docker/tasks/build_image.yml +++ b/roles/awx-docker/tasks/build_image.yml @@ -5,7 +5,7 @@ when: awx_version is not defined - name: Verify awx-logos directory exists for official install - stat: + ansible.builtin.stat: path: "../../awx-logos" # delegate_to: localhost register: logosdir @@ -211,7 +211,7 @@ # delegate_to: localhost - name: Tag task and web images as latest - command: "docker tag {{ item }}:{{ awx_version }} {{ item }}:latest" + ansible.builtin.command: "docker tag {{ item }}:{{ awx_version }} {{ item }}:latest" # delegate_to: localhost with_items: - "{{ awx_task_image }}" diff --git a/roles/awx-docker/tasks/check_docker.yml b/roles/awx-docker/tasks/check_docker.yml index 875e8a21e..9623ee227 100644 --- a/roles/awx-docker/tasks/check_docker.yml +++ b/roles/awx-docker/tasks/check_docker.yml @@ -1,7 +1,7 @@ # check_docker.yml --- - name: awx_postgres_data_dir should be defined - assert: + ansible.builtin.assert: that: - awx_postgres_data_dir is defined and awx_postgres_data_dir != '' msg: "Set the value of 'awx_postgres_data_dir' in the inventory file." 
diff --git a/roles/awx-docker/tasks/compose.yml b/roles/awx-docker/tasks/compose.yml index a2fe4bbb0..9d27533e9 100644 --- a/roles/awx-docker/tasks/compose.yml +++ b/roles/awx-docker/tasks/compose.yml @@ -1,6 +1,6 @@ --- - name: Create {{ awx_inventory_dir }} directory - file: + ansible.builtin.file: path: "{{ awx_inventory_dir }}" state: directory owner: "{{ docker_user_username }}" @@ -72,11 +72,11 @@ register: awx_compose_start - name: Update CA trust in awx_web container - command: "docker exec {{ awx_container_prefix }}_web '/usr/bin/update-ca-trust'" + ansible.builtin.command: "docker exec {{ awx_container_prefix }}_web '/usr/bin/update-ca-trust'" when: awx_compose_config.changed or awx_compose_start.changed - name: Update CA trust in awx_task container - command: "docker exec {{ awx_container_prefix }}_task '/usr/bin/update-ca-trust'" + ansible.builtin.command: "docker exec {{ awx_container_prefix }}_task '/usr/bin/update-ca-trust'" when: awx_compose_config.changed or awx_compose_start.changed - name: Wait for launch script to create user @@ -87,8 +87,8 @@ ## moved to smoke-test.yml # ## ref: https://github.com/ansible/awx/blob/20.1.0/tools/docker-compose/ansible/smoke-test.yml # - name: Create Preload data -## command: "docker exec {{ awx_container_prefix }}_task bash -c '/usr/bin/awx-manage create_preload_data'" -# command: "docker exec {{ awx_container_prefix }}_task bash -c '/usr/bin/awx-manage create_preload_data'" +## ansible.builtin.command: "docker exec {{ awx_container_prefix }}_task bash -c '/usr/bin/awx-manage create_preload_data'" +# ansible.builtin.command: "docker exec {{ awx_container_prefix }}_task bash -c '/usr/bin/awx-manage create_preload_data'" # when: awx_create_preload_data|bool # register: awx_cdo # changed_when: "'added' in awx_cdo.stdout" diff --git a/roles/awx-docker/tasks/main.yml b/roles/awx-docker/tasks/main.yml index 98b76a46f..f55c45c28 100644 --- a/roles/awx-docker/tasks/main.yml +++ b/roles/awx-docker/tasks/main.yml @@ -19,7 +19,7 @@ no_log: true - name: Check for existing Postgres data - stat: + ansible.builtin.stat: path: "{{ awx_postgres_data_dir }}/pgdata/PG_VERSION" register: pg_version_file @@ -33,7 +33,7 @@ upgrade_postgres: "{{ old_pg_version is defined and old_pg_version == '9.6' }}" - name: Set up new postgres paths pre-upgrade - file: + ansible.builtin.file: state: directory path: "{{ item }}" recurse: true @@ -68,7 +68,7 @@ # when: upgrade_postgres | bool # #- name: Remove old data directory -# file: +# ansible.builtin.file: # path: "{{ awx_postgres_data_dir + '/pgdata' }}" # state: absent # when: awx_compose_start_containers|bool diff --git a/roles/awx-docker/tasks/set_image.yml b/roles/awx-docker/tasks/set_image.yml index 7bd6cf636..a17a0fca8 100644 --- a/roles/awx-docker/tasks/set_image.yml +++ b/roles/awx-docker/tasks/set_image.yml @@ -24,7 +24,7 @@ when: ansible_connection != "local" and awx_docker_registry is not defined - name: Ensure directory exists - file: + ansible.builtin.file: path: "{{ docker_deploy_base_path }}" state: directory when: ansible_connection != "local" and awx_docker_registry is not defined diff --git a/roles/awx-docker/tasks/smoke_test.yml b/roles/awx-docker/tasks/smoke_test.yml index cf1bab205..137cd1748 100644 --- a/roles/awx-docker/tasks/smoke_test.yml +++ b/roles/awx-docker/tasks/smoke_test.yml @@ -2,7 +2,7 @@ # Takes a while for migrations to finish - name: Wait for the environment to be ready - uri: + ansible.builtin.uri: # url: "http://localhost:8013/api/v2/ping/" url: "http://localhost:{{ awx_host_port 
}}/api/v2/ping/" register: _result diff --git a/roles/bootstrap-awstats/defaults/main.yml b/roles/bootstrap-awstats/defaults/main.yml index 21677d4c7..0e622f27c 100644 --- a/roles/bootstrap-awstats/defaults/main.yml +++ b/roles/bootstrap-awstats/defaults/main.yml @@ -6,7 +6,7 @@ awstats_pkg_state: present # Repository states: present or absent #awstats_repository_state: present -apache_directory : "apache2" +apache_directory: "apache2" apache_conf_path: "/etc/{{ apache_directory }}" apache_log_path: "/var/log/{{ apache_directory }}" #apache_log_path: "${APACHE_LOG_DIR}" diff --git a/roles/bootstrap-awstats/tasks/remove.yml b/roles/bootstrap-awstats/tasks/remove.yml index fea495ae8..b43325e03 100644 --- a/roles/bootstrap-awstats/tasks/remove.yml +++ b/roles/bootstrap-awstats/tasks/remove.yml @@ -17,7 +17,7 @@ notify: restart apache - name: apache | Disable the awstats site - command: a2dissite awstats + ansible.builtin.command: a2dissite awstats ignore_errors: yes notify: restart apache diff --git a/roles/bootstrap-awstats/tasks/setup.yml b/roles/bootstrap-awstats/tasks/setup.yml index a8c9d4aca..6670ab8ac 100644 --- a/roles/bootstrap-awstats/tasks/setup.yml +++ b/roles/bootstrap-awstats/tasks/setup.yml @@ -20,7 +20,7 @@ # notify: restart apache - name: apache | Enable some required modules (rewrite) - command: a2enmod rewrite vhost_alias cgi + ansible.builtin.command: a2enmod rewrite vhost_alias cgi - name: apache | Add apache awstats vhosts configuration. ansible.builtin.template: @@ -33,7 +33,7 @@ notify: restart apache - name: apache | Enable the awstats site - command: a2ensite awstats + ansible.builtin.command: a2ensite awstats ignore_errors: yes when: apache_create_vhosts|bool notify: restart apache diff --git a/roles/bootstrap-certs/tasks/trust_cert.yml b/roles/bootstrap-certs/tasks/trust_cert.yml index 00bf55154..a932692b3 100644 --- a/roles/bootstrap-certs/tasks/trust_cert.yml +++ b/roles/bootstrap-certs/tasks/trust_cert.yml @@ -63,11 +63,11 @@ ## ref: https://access.redhat.com/documentation/en-us/red_hat_enterprise_linux/8/html/security_hardening/using-shared-system-certificates_security-hardening ## ref: https://techjourney.net/update-add-ca-certificates-bundle-in-redhat-centos/ - name: "trust_cert | update CA trust: {{ __bootstrap_certs__trust_ca_update_trust_cmd }}" - command: "{{ __bootstrap_certs__trust_ca_update_trust_cmd }}" + ansible.builtin.command: "{{ __bootstrap_certs__trust_ca_update_trust_cmd }}" # when: trust_ca_cacertinstalled|bool is changed or bootstrap_certs__ca_force_distribute_nodes|bool #- name: convert to pkcs12 -# command: openssl pkcs12 -export \ +# ansible.builtin.command: openssl pkcs12 -export \ # -in {{ ca_path }}/{{ __bootstrap_certs__cert_node.domainName }}/{{ __bootstrap_certs__cert_node.commonName }}.crt \ # -inkey {{ ca_path }}/{{ __bootstrap_certs__cert_node.domainName}}/{{ __bootstrap_certs__cert_node.commonName }}.key \ # -out {{ ca_path }}/{{ __bootstrap_certs__cert_node.domainName }}/{{ __bootstrap_certs__cert_node.commonName }}.p12 \ @@ -78,7 +78,7 @@ #- name: "trust_cert | Add service cert to keystore" # when: bootstrap_certs__ca_java_keystore_enabled|bool -# command: | +# ansible.builtin.command: | # keytool -importcert \ # -storepass {{ bootstrap_certs__ca_java_keystore_pass }} \ # -keystore {{ ca_java_keystore }} \ @@ -91,7 +91,7 @@ #- name: "trust_cert | Add root ca root and host certs to keystore" # when: bootstrap_certs__ca_java_keystore_enabled|bool -# command: | +# ansible.builtin.command: | # keytool -importcert -v \ # -storepass 
{{ bootstrap_certs__ca_java_keystore_pass }} \ # -noprompt \ @@ -111,7 +111,7 @@ ## ref: https://superuser.com/questions/881665/keytool-commands-to-replace-existing-ssl-certificate ## ref: https://stackoverflow.com/questions/48204014/how-to-delete-already-import-certificate-alias-by-keytool-command - name: "trust_cert | Remove old service cert from keystore" - command: | + ansible.builtin.command: | keytool -delete -v \ -storepass {{ bootstrap_certs__ca_java_keystore_pass }} \ -noprompt \ @@ -127,7 +127,7 @@ verbosity: 1 - name: "trust_cert | Add current service cert to keystore" - command: | + ansible.builtin.command: | keytool -importcert -v \ -storepass {{ bootstrap_certs__ca_java_keystore_pass }} \ -noprompt \ diff --git a/roles/bootstrap-dhcp/tasks/apparmor-fix.yml b/roles/bootstrap-dhcp/tasks/apparmor-fix.yml index 172312312..c85b155cd 100644 --- a/roles/bootstrap-dhcp/tasks/apparmor-fix.yml +++ b/roles/bootstrap-dhcp/tasks/apparmor-fix.yml @@ -35,4 +35,4 @@ tags: dhcp - name: AppArmor fix | Force running handlers now - meta: flush_handlers + ansible.builtin.meta: flush_handlers diff --git a/roles/bootstrap-docker/tasks/deploy_config.yml b/roles/bootstrap-docker/tasks/deploy_config.yml index 6419c64cf..4c21a71f4 100644 --- a/roles/bootstrap-docker/tasks/deploy_config.yml +++ b/roles/bootstrap-docker/tasks/deploy_config.yml @@ -110,7 +110,7 @@ notify: Docker | Restart Docker #- name: {{ log_prefix_local }} Restart Docker now to make sure `docker login` works -# meta: "flush_handlers" +# ansible.builtin.meta: "flush_handlers" # #- name: {{ log_prefix_local }} Manage Docker registry login credentials # community.docker.docker_login: diff --git a/roles/bootstrap-docker/tasks/main.yml b/roles/bootstrap-docker/tasks/main.yml index 98cd9bc24..2b3ab167e 100644 --- a/roles/bootstrap-docker/tasks/main.yml +++ b/roles/bootstrap-docker/tasks/main.yml @@ -1,6 +1,6 @@ --- -- meta: flush_handlers +- ansible.builtin.meta: flush_handlers - name: "Display bootstrap_docker__actions" ansible.builtin.debug: diff --git a/roles/bootstrap-git/tasks/install-from-source.yml b/roles/bootstrap-git/tasks/install-from-source.yml index c3c95b8ff..a596e2024 100644 --- a/roles/bootstrap-git/tasks/install-from-source.yml +++ b/roles/bootstrap-git/tasks/install-from-source.yml @@ -24,7 +24,7 @@ state: present - name: Get installed version. - command: git --version warn=no + ansible.builtin.command: git --version warn=no changed_when: false failed_when: false check_mode: false @@ -67,7 +67,7 @@ - name: Build git. 
when: git_reinstall_from_source | bool - command: > + ansible.builtin.command: > make prefix={{ git_install_path }} {{ item }} chdir={{ workspace }}/git-{{ git_version }} with_items: diff --git a/roles/bootstrap-govc/tasks/cloud_init_boot.yml b/roles/bootstrap-govc/tasks/cloud_init_boot.yml index 39be22bd1..144e0a40f 100644 --- a/roles/bootstrap-govc/tasks/cloud_init_boot.yml +++ b/roles/bootstrap-govc/tasks/cloud_init_boot.yml @@ -4,7 +4,7 @@ - block: - name: Push cloud init seed to vcenter datastore - command: "govc datastore.upload seed.iso {{ inventory_hostname }}/config.iso" + ansible.builtin.command: "govc datastore.upload seed.iso {{ inventory_hostname }}/config.iso" args: chdir: '{{ seed_iso_base }}/{{ inventory_hostname }}' @@ -18,7 +18,7 @@ - name: Insert cloud-init cdrom into vm - command: | + ansible.builtin.command: | govc device.cdrom.insert \ -vm /{{ hostvars[esx_hostname]['esx_datacenter'] }}/vm/{{ inventory_hostname }} \ -device {{ esx_cdrom_device }} \ @@ -33,7 +33,7 @@ GOVC_DATASTORE: "{{ bootstrap_govc__datastore }}" - name: Mount the cloud-init cdrom on the vm - command: "govc device.connect -vm /{{ hostvars[esx_hostname]['esx_datacenter'] }}/vm/{{ inventory_hostname }} {{ esx_cdrom_device }}" + ansible.builtin.command: "govc device.connect -vm /{{ hostvars[esx_hostname]['esx_datacenter'] }}/vm/{{ inventory_hostname }} {{ esx_cdrom_device }}" environment: GOVC_HOST: "{{ bootstrap_govc__host }}" @@ -45,7 +45,7 @@ - name: Power on vm - command: "govc vm.power -on {{ inventory_hostname }}" + ansible.builtin.command: "govc vm.power -on {{ inventory_hostname }}" environment: GOVC_HOST: "{{ bootstrap_govc__host }}" GOVC_URL: "https://{{ bootstrap_govc__host }}/sdk" @@ -55,7 +55,7 @@ GOVC_DATASTORE: "{{ bootstrap_govc__datastore }}" - name: Wait for vm boot - command: "govc vm.info -waitip {{ inventory_hostname }}" + ansible.builtin.command: "govc vm.info -waitip {{ inventory_hostname }}" environment: GOVC_HOST: "{{ bootstrap_govc__host }}" GOVC_URL: "https://{{ bootstrap_govc__host }}/sdk" @@ -65,7 +65,7 @@ GOVC_DATASTORE: "{{ bootstrap_govc__datastore }}" # # - name: Reboot vm to establish new settings - # command: "govc vm.power -r {{ inventory_hostname }}" + # ansible.builtin.command: "govc vm.power -r {{ inventory_hostname }}" # environment: # GOVC_HOST: "{{ bootstrap_govc__host }}" # GOVC_URL: "https://{{ bootstrap_govc__host }}/sdk" @@ -76,7 +76,7 @@ rescue: - name: Poweroff vm - command: "govc vm.power -off {{ inventory_hostname }}" + ansible.builtin.command: "govc vm.power -off {{ inventory_hostname }}" environment: GOVC_HOST: "{{ bootstrap_govc__host }}" GOVC_URL: "https://{{ bootstrap_govc__host }}/sdk" @@ -86,7 +86,7 @@ GOVC_DATASTORE: "{{ bootstrap_govc__datastore }}" - name: Disconnect the cloud-init cdrom on the vm - command: "govc device.disconnect -vm /{{ hostvars[esx_hostname]['esx_datacenter'] }}/vm/{{ inventory_hostname }} {{ esx_cdrom_device }}" + ansible.builtin.command: "govc device.disconnect -vm /{{ hostvars[esx_hostname]['esx_datacenter'] }}/vm/{{ inventory_hostname }} {{ esx_cdrom_device }}" environment: GOVC_HOST: "{{ bootstrap_govc__host }}" GOVC_URL: "https://{{ bootstrap_govc__host }}/sdk" @@ -96,7 +96,7 @@ GOVC_DATASTORE: "{{ bootstrap_govc__datastore }}" - name: Eject cloud-init cdrom from vm - command: | + ansible.builtin.command: | govc device.cdrom.eject \ -vm /{{ hostvars[esx_hostname]['esx_datacenter'] }}/vm/{{ inventory_hostname }} \ -device {{ esx_cdrom_device }} \ diff --git a/roles/bootstrap-govc/tasks/deploy_ova.yml 
b/roles/bootstrap-govc/tasks/deploy_ova.yml index 925e99736..8ec91c841 100644 --- a/roles/bootstrap-govc/tasks/deploy_ova.yml +++ b/roles/bootstrap-govc/tasks/deploy_ova.yml @@ -3,7 +3,7 @@ --- - name: Check for existing VM - command: "{{ bootstrap_govc__file }} vm.info {{ item.name |quote }}" + ansible.builtin.command: "{{ bootstrap_govc__file }} vm.info {{ item.name |quote }}" environment: GOVC_HOST: "{{ bootstrap_govc__host }}" GOVC_URL: "https://{{ bootstrap_govc__host }}/sdk" @@ -20,7 +20,7 @@ # unless vm by this name already exists, import ova - name: Import OVA - command: > + ansible.builtin.command: > {{ bootstrap_govc__file }} import.ova {% if item.0.spec is defined %} -options={{ item.0.spec }} diff --git a/roles/bootstrap-govc/tasks/install.yml b/roles/bootstrap-govc/tasks/install.yml index a69941636..7cb75e9b6 100644 --- a/roles/bootstrap-govc/tasks/install.yml +++ b/roles/bootstrap-govc/tasks/install.yml @@ -3,7 +3,7 @@ --- - name: Check the current govc version (if any) - command: "{{ bootstrap_govc__path }}/govc version" + ansible.builtin.command: "{{ bootstrap_govc__path }}/govc version" ignore_errors: true register: __govc_installed_version changed_when: false @@ -61,12 +61,12 @@ when: "'64' in ansible_architecture" - name: Uncompress govc binary - command: "gunzip {{ bootstrap_govc__tmp }}/govc-{{ bootstrap_govc__version }}.gz" + ansible.builtin.command: "gunzip {{ bootstrap_govc__tmp }}/govc-{{ bootstrap_govc__version }}.gz" args: creates: "{{ bootstrap_govc__tmp }}/govc-{{ bootstrap_govc__version }}" - name: move govc into place - command: mv "{{ bootstrap_govc__tmp }}/govc-{{ bootstrap_govc__version }}" "{{ bootstrap_govc__file }}" + ansible.builtin.command: mv "{{ bootstrap_govc__tmp }}/govc-{{ bootstrap_govc__version }}" "{{ bootstrap_govc__file }}" - name: Ensure govc executable file: @@ -74,7 +74,7 @@ mode: "0755" - name: Verify installed govc version - command: "{{ bootstrap_govc__path }}/govc version" + ansible.builtin.command: "{{ bootstrap_govc__path }}/govc version" ignore_errors: false register: __govc_version_installed changed_when: false diff --git a/roles/bootstrap-inspec/tasks/main.yml b/roles/bootstrap-inspec/tasks/main.yml index 5a033eb64..7a6f3e4b5 100644 --- a/roles/bootstrap-inspec/tasks/main.yml +++ b/roles/bootstrap-inspec/tasks/main.yml @@ -14,7 +14,7 @@ # when: ansible_facts.os_family == 'RedHat' - name: "Get installed inspec version." - command: inspec version + ansible.builtin.command: inspec version ignore_errors: yes changed_when: false failed_when: false diff --git a/roles/bootstrap-jenkins/tasks/settings.yml b/roles/bootstrap-jenkins/tasks/settings.yml index 60720b60e..c10e5151a 100644 --- a/roles/bootstrap-jenkins/tasks/settings.yml +++ b/roles/bootstrap-jenkins/tasks/settings.yml @@ -78,7 +78,7 @@ - jenkins_proxy_port | length > 0 - name: Trigger handlers immediately in case Jenkins was installed - meta: flush_handlers + ansible.builtin.meta: flush_handlers - name: Immediately restart Jenkins on http or user changes. 
service: diff --git a/roles/bootstrap-kvm/tasks/apparmor.yml b/roles/bootstrap-kvm/tasks/apparmor.yml index c93073317..82b88c6e5 100644 --- a/roles/bootstrap-kvm/tasks/apparmor.yml +++ b/roles/bootstrap-kvm/tasks/apparmor.yml @@ -14,7 +14,7 @@ dest: /etc/apparmor.d/disable/usr.lib.libvirt.virt-aa-helper - name: apparmor | disabling apparmor profiles for libvirt - command: "apparmor_parser -R {{ item }}" + ansible.builtin.command: "apparmor_parser -R {{ item }}" become: true loop: - /etc/apparmor.d/usr.sbin.libvirtd diff --git a/roles/bootstrap-netplan/handlers/main.yml b/roles/bootstrap-netplan/handlers/main.yml index c7ac15be1..7c8b62b24 100644 --- a/roles/bootstrap-netplan/handlers/main.yml +++ b/roles/bootstrap-netplan/handlers/main.yml @@ -1,12 +1,12 @@ --- - name: Generating Netplan Configuration - command: netplan generate + ansible.builtin.command: netplan generate listen: netplan generate config notify: netplan apply config become: true - name: Applying Netplan Configuration - command: netplan apply + ansible.builtin.command: netplan apply listen: netplan apply config become: true when: bootstrap_netplan__apply diff --git a/roles/bootstrap-netplan/tasks/main.yml b/roles/bootstrap-netplan/tasks/main.yml index eae0588e7..356e536bc 100644 --- a/roles/bootstrap-netplan/tasks/main.yml +++ b/roles/bootstrap-netplan/tasks/main.yml @@ -2,6 +2,6 @@ - ansible.builtin.import_tasks: netplan.yml when: bootstrap_netplan__enabled -- meta: flush_handlers +- ansible.builtin.meta: flush_handlers diff --git a/roles/bootstrap-nfs/handlers/main.yml b/roles/bootstrap-nfs/handlers/main.yml index 4f466912a..8bb4e2819 100644 --- a/roles/bootstrap-nfs/handlers/main.yml +++ b/roles/bootstrap-nfs/handlers/main.yml @@ -1,4 +1,4 @@ --- - name: reload nfs - command: 'exportfs -ra' + ansible.builtin.command: 'exportfs -ra' diff --git a/roles/bootstrap-nodejs/tasks/setup-RedHat.yml b/roles/bootstrap-nodejs/tasks/setup-RedHat.yml index 61a57d44a..225ba1fb4 100644 --- a/roles/bootstrap-nodejs/tasks/setup-RedHat.yml +++ b/roles/bootstrap-nodejs/tasks/setup-RedHat.yml @@ -28,7 +28,7 @@ when: ansible_distribution_major_version|int >= 7 - name: Ensure Node.js AppStream module is disabled (CentOS 8+). - command: yum module disable -y nodejs + ansible.builtin.command: yum module disable -y nodejs args: warn: false register: module_disable diff --git a/roles/bootstrap-ntp/tasks/sync-chrony.yml b/roles/bootstrap-ntp/tasks/sync-chrony.yml index 35aaccb75..7b0d395e2 100644 --- a/roles/bootstrap-ntp/tasks/sync-chrony.yml +++ b/roles/bootstrap-ntp/tasks/sync-chrony.yml @@ -1,8 +1,8 @@ --- - name: Force NTP sync - command: chronyc makestep + ansible.builtin.command: chronyc makestep - name: Ensure system is NTP time synced when: bootstrap_ntp__chrony_waitsync|d(True)|bool - command: chronyc waitsync 30 + ansible.builtin.command: chronyc waitsync 30 diff --git a/roles/bootstrap-ntp/tasks/sync-ntp.yml b/roles/bootstrap-ntp/tasks/sync-ntp.yml index e35da5795..b9f52fb36 100644 --- a/roles/bootstrap-ntp/tasks/sync-ntp.yml +++ b/roles/bootstrap-ntp/tasks/sync-ntp.yml @@ -12,7 +12,7 @@ ## ref: https://superuser.com/questions/639202/updating-time-ntpdate3108-the-ntp-socket-is-in-use-exiting/639516#639516 - name: "Force time synchronization using stepping" - command: "ntpdate -bu {{ __bootstrap_ntp__servers|join(' ') }}" + ansible.builtin.command: "ntpdate -bu {{ __bootstrap_ntp__servers|join(' ') }}" # Fix after RHEL8 GAs! 
ignore_errors: true @@ -22,4 +22,4 @@ state: started - name: Sync the hardware clock - command: "hwclock --systohc" + ansible.builtin.command: "hwclock --systohc" diff --git a/roles/bootstrap-ntp/tasks/test-chrony.yml b/roles/bootstrap-ntp/tasks/test-chrony.yml index 62997947e..b16eda4c7 100644 --- a/roles/bootstrap-ntp/tasks/test-chrony.yml +++ b/roles/bootstrap-ntp/tasks/test-chrony.yml @@ -4,7 +4,7 @@ ## ref: https://unix.stackexchange.com/questions/554509/chrony-synchronitation ## ref: https://stackoverflow.com/questions/65951206/time-is-not-getting-synchronized-in-chrony-setup - name: "Run chronyc tracking" - command: chronyc tracking + ansible.builtin.command: chronyc tracking register: __chronyc_tracking_results - name: "Display __chronyc_tracking_results" @@ -23,7 +23,7 @@ - (__chronyc_tracking_results.stdout_lines | select('match', "Leap status.*Normal") | list | count) == 1 - name: "Run chronyc sources" - command: chronyc sources + ansible.builtin.command: chronyc sources register: __chronyc_sources_results - name: "Display __chronyc_sources_results" @@ -48,7 +48,7 @@ that: __ntp_server_found|d(False)|bool #- name: "Run chronyc accheck on all servers in __bootstrap_ntp__servers" -# command: "chronyc accheck {{ item }}" +# ansible.builtin.command: "chronyc accheck {{ item }}" # loop: "{{ __bootstrap_ntp__servers }}" # register: __chronyc_accheck_results # diff --git a/roles/bootstrap-ntp/tasks/test-ntp.yml b/roles/bootstrap-ntp/tasks/test-ntp.yml index b79608740..d36e40db5 100644 --- a/roles/bootstrap-ntp/tasks/test-ntp.yml +++ b/roles/bootstrap-ntp/tasks/test-ntp.yml @@ -2,7 +2,7 @@ ## ref: https://vitux.com/how-to-install-ntp-server-and-client-on-ubuntu/ - name: "Query Time Synchronization Queue" - command: ntpq -p + ansible.builtin.command: ntpq -p register: __ntp_sources_results - name: "Display __ntp_sources_results" diff --git a/roles/bootstrap-packer/tasks/main.yml b/roles/bootstrap-packer/tasks/main.yml index 0feb82d56..cb22454cd 100644 --- a/roles/bootstrap-packer/tasks/main.yml +++ b/roles/bootstrap-packer/tasks/main.yml @@ -26,7 +26,7 @@ - name: "Get installed packer version." 
environment: PATH: "{{ ansible_env.PATH }}:/usr/local/bin" - command: "packer --version" + ansible.builtin.command: "packer --version" ignore_errors: yes changed_when: false # failed_when: false diff --git a/roles/bootstrap-postfix/handlers/main.yml b/roles/bootstrap-postfix/handlers/main.yml index 8b80ea988..f84acfd1d 100644 --- a/roles/bootstrap-postfix/handlers/main.yml +++ b/roles/bootstrap-postfix/handlers/main.yml @@ -7,42 +7,42 @@ when: bootstrap_postfix__service_state | default('started') == 'started' - name: new_aliases - command: > + ansible.builtin.command: > newaliases when: bootstrap_postfix__default_database_type != 'regexp' - name: new_virtual_aliases - command: > + ansible.builtin.command: > postmap {{ bootstrap_postfix__default_database_type }}:{{ bootstrap_postfix__virtual_aliases_file }} when: bootstrap_postfix__default_database_type != 'regexp' - name: postmap_sasl_passwd - command: > + ansible.builtin.command: > postmap {{ bootstrap_postfix__default_database_type }}:{{ bootstrap_postfix__sasl_passwd_file }} when: bootstrap_postfix__default_database_type != 'regexp' - name: postmap_generic - command: > + ansible.builtin.command: > postmap {{ bootstrap_postfix__smtp_generic_maps_database_type }}:{{ bootstrap_postfix__smtp_generic_maps_file }} when: bootstrap_postfix__smtp_generic_maps_database_type != 'regexp' - name: postmap_sender_canonical_maps - command: > + ansible.builtin.command: > postmap {{ bootstrap_postfix__sender_canonical_maps_database_type }}:{{ bootstrap_postfix__sender_canonical_maps_file }} when: bootstrap_postfix__sender_canonical_maps_database_type != 'regexp' - name: postmap_sender_dependent_relayhost_maps - command: > + ansible.builtin.command: > postmap {{ bootstrap_postfix__default_database_type }}:{{ bootstrap_postfix__sender_dependent_relayhost_maps_file }} when: bootstrap_postfix__sender_canonical_maps_database_type != 'regexp' - name: postmap_recipient_canonical_maps - command: > + ansible.builtin.command: > postmap {{ bootstrap_postfix__recipient_canonical_maps_database_type }}:{{ bootstrap_postfix__recipient_canonical_maps_file }} when: bootstrap_postfix__recipient_canonical_maps_database_type != 'regexp' - name: postmap_transport_maps - command: > + ansible.builtin.command: > postmap {{ bootstrap_postfix__transport_maps_database_type }}:{{ bootstrap_postfix__transport_maps_file }} when: bootstrap_postfix__transport_maps_database_type != 'regexp' diff --git a/roles/bootstrap-python3/tasks/install-pip.yml b/roles/bootstrap-python3/tasks/install-pip.yml index cd9cccff6..9fcef8a78 100644 --- a/roles/bootstrap-python3/tasks/install-pip.yml +++ b/roles/bootstrap-python3/tasks/install-pip.yml @@ -2,7 +2,7 @@ ## ref: https://github.com/William-Yeh/ansible-uwsgi/blob/master/tasks/install-pip.yml - name: check to see if pip is already installed - command: "pip --version" + ansible.builtin.command: "pip --version" ignore_errors: true register: __pip_is_installed changed_when: false @@ -17,7 +17,7 @@ dest: /tmp - name: "Install pip" - command: "{{ __bootstrap_python__python_bin_path }} /tmp/get-pip.py" + ansible.builtin.command: "{{ __bootstrap_python__python_bin_path }} /tmp/get-pip.py" - name: delete get-pip.py file: diff --git a/roles/bootstrap_awx/tasks/awx_setup.yml b/roles/bootstrap_awx/tasks/awx_setup.yml index 797c1a35c..50a0773ec 100644 --- a/roles/bootstrap_awx/tasks/awx_setup.yml +++ b/roles/bootstrap_awx/tasks/awx_setup.yml @@ -74,11 +74,11 @@ checksum: sha256:be169fc5333a80c5c4e272f1603ab65361636e27ed513a1ae2c701f93893c0fc - name: Apply 
cert-manager.yaml manifest - command: | + ansible.builtin.command: | kubectl apply -f /root/cert-manager/cert-manager.yaml - name: Check the cert-manager API until it's ready (max 10min) - command: | + ansible.builtin.command: | cmctl check api --kubeconfig /etc/rancher/k3s/k3s.yaml register: result until: result.stdout.find("The cert-manager API is ready") != -1 @@ -94,7 +94,7 @@ mode: '0640' - name: Deploy the issuer - command: | + ansible.builtin.command: | kubectl apply -k /root/awx-on-k3s-1.1.0/acme - name: Copy base/middleware.yaml @@ -106,7 +106,7 @@ mode: '0640' - name: Apply middleware.yaml to redirect HTTP to HTTPS - command: | + ansible.builtin.command: | kubectl -n default apply -f /root/awx-on-k3s-1.1.0/base/middleware.yaml - name: Prepare Persistent Volume directories (1/2) @@ -128,11 +128,11 @@ mode: '0755' - name: Deploy AWX/Automation Controller onto K3s - command: | + ansible.builtin.command: | kubectl apply -k /root/awx-on-k3s-1.1.0/base - name: Check if AWX/Automation Controller is deployed (max 10min) - command: | + ansible.builtin.command: | kubectl -n awx logs --tail=10 deployments/awx-operator-controller-manager -c awx-manager register: result until: result.stdout.find("failed=0") != -1 @@ -141,5 +141,5 @@ delay: 30 - name: Deploy the required AWX/Automation Controller Operator objects in the awx namespace. - command: | + ansible.builtin.command: | kubectl -n awx get awx,all,ingress,secrets diff --git a/roles/bootstrap_awx/tasks/rancher_setup.yml b/roles/bootstrap_awx/tasks/rancher_setup.yml index 0faa6bfb5..238da9598 100644 --- a/roles/bootstrap_awx/tasks/rancher_setup.yml +++ b/roles/bootstrap_awx/tasks/rancher_setup.yml @@ -22,11 +22,11 @@ kubectl create namespace cattle-system --dry-run=client -o yaml | kubectl apply -f - - name: Add the helm repo - command: | + ansible.builtin.command: | /snap/bin/helm repo add rancher-stable https://releases.rancher.com/server-charts/stable - name: Run a helm repo update - command: | + ansible.builtin.command: | /snap/bin/helm repo update - name: Deploy the stable Rancher diff --git a/roles/bootstrap_awx/tasks/server_setup.yml b/roles/bootstrap_awx/tasks/server_setup.yml index 8b2aa4ac2..0e6889b23 100644 --- a/roles/bootstrap_awx/tasks/server_setup.yml +++ b/roles/bootstrap_awx/tasks/server_setup.yml @@ -1,7 +1,7 @@ --- - name: Set server hostname to AWX / Ansible Controller URL - command: | + ansible.builtin.command: | hostnamectl set-hostname {{ bootstrap_awx_awx_url }} - name: Update repos diff --git a/roles/caddy2/tasks/main.yml b/roles/caddy2/tasks/main.yml index 21a08cb2a..5042a55fd 100644 --- a/roles/caddy2/tasks/main.yml +++ b/roles/caddy2/tasks/main.yml @@ -8,7 +8,7 @@ system: yes - name: Getting Caddy 2 - unarchive: + ansible.builtin.unarchive: url: '{{ caddy_bin_url }}' remote_src: yes dest: '{{ caddy_install_root }}' @@ -30,7 +30,7 @@ mode: ug=rw,o=r - name: Starting service - systemd: + ansible.builtin.systemd: daemon-reload: yes name: caddy2 enabled: yes diff --git a/roles/cloudstack-master/tasks/initialize-api-config.yml b/roles/cloudstack-master/tasks/initialize-api-config.yml index 2a601ddfe..8f4b60f5f 100644 --- a/roles/cloudstack-master/tasks/initialize-api-config.yml +++ b/roles/cloudstack-master/tasks/initialize-api-config.yml @@ -63,13 +63,13 @@ snap_date: "{{ lookup('pipe', 'date +%Y%m%d_%H%M') }}" tags: - get-csmaster-apikey - command: "mv ~/{{ cloudstack_conf_file }} ~/{{ cloudstack_conf_file }}.{{ snap_date }}" + ansible.builtin.command: "mv ~/{{ cloudstack_conf_file }} ~/{{ cloudstack_conf_file }}.{{ 
snap_date }}" when: cs_ini_stat.stat.exists - name: "create {{ cloudstack_conf_file }} on {{ ansible_host }}" tags: - get-csmaster-apikey - command: "{{ CSUtils.VenvDir }}/bin/python {{ CSUtils.AppDir }}/cs-utils.py" + ansible.builtin.command: "{{ CSUtils.VenvDir }}/bin/python {{ CSUtils.AppDir }}/cs-utils.py" args: creates: "~/{{ cloudstack_conf_file }}" diff --git a/roles/cobbler/handlers/main.yml b/roles/cobbler/handlers/main.yml index 1b61c799a..4ff42705a 100644 --- a/roles/cobbler/handlers/main.yml +++ b/roles/cobbler/handlers/main.yml @@ -2,25 +2,25 @@ # handlers file for rbicker.cobbler - name: restart apache - service: + ansible.builtin.service: name: httpd state: restarted - name: restart cobblerd - service: + ansible.builtin.service: name: cobblerd state: restarted - name: cobbler sync - command: /usr/bin/cobbler sync + ansible.builtin.command: /usr/bin/cobbler sync - name: reload firewalld - command: firewall-cmd --reload + ansible.builtin.command: firewall-cmd --reload - name: restart firewalld tags: - firewall - service: + ansible.builtin.service: name: firewalld state: restarted diff --git a/roles/cobbler/tasks/RedHat.yml b/roles/cobbler/tasks/RedHat.yml index 5cf8a0778..c07983540 100644 --- a/roles/cobbler/tasks/RedHat.yml +++ b/roles/cobbler/tasks/RedHat.yml @@ -34,32 +34,32 @@ persistent: yes - name: ensure httpd service is enabled - service: + ansible.builtin.service: name: httpd enabled: yes state: started - name: ensure xinetd service is enabled and started - service: + ansible.builtin.service: name: xinetd enabled: yes state: started - name: ensure named-chroot is enabled and started - service: + ansible.builtin.service: name: named-chroot enabled: yes state: started when: cobbler_manage_dns | bool - name: ensure dhcpd - service: + ansible.builtin.service: name: dhcpd enabled: yes when: cobbler_manage_dhcp | bool - name: ensure cobbler services is enabled and started - service: + ansible.builtin.service: name: cobblerd enabled: yes state: started @@ -96,35 +96,35 @@ when: cobbler_templates_force | bool - name: ensure server is set in /etc/cobbler/settings - lineinfile: + ansible.builtin.lineinfile: line: "server: {{ ansible_default_ipv4.address }}" regexp: "^server:" dest: /etc/cobbler/settings notify: restart cobblerd - name: ensure next_server is set in /etc/cobbler/settings - lineinfile: + ansible.builtin.lineinfile: line: "next_server: {{ ansible_default_ipv4.address }}" regexp: "^next_server:" dest: /etc/cobbler/settings notify: restart cobblerd - name: ensure default_password_crypted is set in /etc/cobbler/settings - lineinfile: + ansible.builtin.lineinfile: line: "default_password_crypted: {{ cobbler_default_password_crypted }}" regexp: "^default_password_crypted:" dest: /etc/cobbler/settings notify: restart cobblerd - name: ensure pxe_just_once is enabled in /etc/cobbler/settings - lineinfile: + ansible.builtin.lineinfile: line: "pxe_just_once: {{ cobbler_pxe_just_once }}" regexp: "^pxe_just_once:" dest: /etc/cobbler/settings notify: restart cobblerd - name: ensure authn_configfile is set in /etc/cobbler/modules.conf for cobbler web - lineinfile: + ansible.builtin.lineinfile: line: "module = authn_configfile" regexp: "^module = authn" insertafter: "[authentication]" @@ -134,7 +134,7 @@ - restart apache - name: ensure authz_allowall is set in /etc/cobbler/modules.conf for cobbler web - lineinfile: + ansible.builtin.lineinfile: line: "module = authz_allowall" regexp: "^module = authz" insertafter: "[authorization]" @@ -152,14 +152,14 @@ - restart apache - name: 
ensure dhcp management is enabled in /etc/cobbler/settings - lineinfile: + ansible.builtin.lineinfile: line: "manage_dhcp: {{ cobbler_manage_dhcp }}" regexp: "^manage_dhcp:" dest: /etc/cobbler/settings notify: restart cobblerd - name: ensure dns management is enabled in /etc/cobbler/settings - lineinfile: + ansible.builtin.lineinfile: line: "manage_dns: {{ cobbler_manage_dns }}" regexp: "^manage_dns:" dest: /etc/cobbler/settings @@ -167,14 +167,14 @@ register: cobbler_dns_enabled - name: ensure /etc/secondary.conf exists with se type cobbler_var_lib_t - file: + ansible.builtin.file: state: touch setype: cobbler_var_lib_t path: /etc/secondary.conf when: cobbler_dns_enabled.changed - name: ensure dns forward zones are set in /etc/cobbler/settings - lineinfile: + ansible.builtin.lineinfile: line: "manage_forward_zones: {{ cobbler_manage_forward_zones }}" regexp: "^manage_forward_zones:" dest: /etc/cobbler/settings @@ -182,7 +182,7 @@ when: cobbler_manage_dns | bool - name: ensure dns reverse zones are set in /etc/cobbler/settings - lineinfile: + ansible.builtin.lineinfile: line: "manage_reverse_zones: {{ cobbler_manage_reverse_zones }}" regexp: "^manage_reverse_zones:" dest: /etc/cobbler/settings @@ -190,36 +190,36 @@ when: cobbler_manage_dns | bool - name: ensure di_dists directive is set in /etc/debmirror.conf - lineinfile: + ansible.builtin.lineinfile: line: "@di_dists=\"dists\";" regexp: "@di_dists=" dest: /etc/debmirror.conf - name: ensure di_archs directive is set in /etc/debmirror.conf - lineinfile: + ansible.builtin.lineinfile: line: "@di_archs=\"arches\";" regexp: "@di_archs=" dest: /etc/debmirror.conf - name: ensure cobbler profile folder exists in /var/www/cobbler/ks_mirror/ - file: + ansible.builtin.file: path: "/var/www/cobbler/ks_mirror/{{ item.name }}-{{ item.arch }}" state: directory with_items: "{{ cobbler_profiles }}" register: cobbler_profiles_folders - name: ensure folder for iso files exist - file: + ansible.builtin.file: path: "{{ cobbler_isos_path }}" state: directory - name: ensure iso mount path exists - file: + ansible.builtin.file: path: "{{ cobbler_isos_mount_path }}" state: directory - name: ensure iso files are present - get_url: + ansible.builtin.get_url: url: "{{ item.item.url }}" dest: "{{ cobbler_isos_path }}/{{ item.item.name }}.iso" with_items: "{{ cobbler_profiles_folders.results }}" diff --git a/roles/cobbler/tasks/main.yml b/roles/cobbler/tasks/main.yml index 5cf8a0778..e594ec08e 100644 --- a/roles/cobbler/tasks/main.yml +++ b/roles/cobbler/tasks/main.yml @@ -34,32 +34,32 @@ persistent: yes - name: ensure httpd service is enabled - service: + ansible.builtin.service: name: httpd enabled: yes state: started - name: ensure xinetd service is enabled and started - service: + ansible.builtin.service: name: xinetd enabled: yes state: started - name: ensure named-chroot is enabled and started - service: + ansible.builtin.service: name: named-chroot enabled: yes state: started when: cobbler_manage_dns | bool - name: ensure dhcpd - service: + ansible.builtin.service: name: dhcpd enabled: yes when: cobbler_manage_dhcp | bool - name: ensure cobbler services is enabled and started - service: + ansible.builtin.service: name: cobblerd enabled: yes state: started @@ -96,35 +96,35 @@ when: cobbler_templates_force | bool - name: ensure server is set in /etc/cobbler/settings - lineinfile: + ansible.builtin.lineinfile: line: "server: {{ ansible_default_ipv4.address }}" regexp: "^server:" dest: /etc/cobbler/settings notify: restart cobblerd - name: ensure next_server is set 
in /etc/cobbler/settings - lineinfile: + ansible.builtin.lineinfile: line: "next_server: {{ ansible_default_ipv4.address }}" regexp: "^next_server:" dest: /etc/cobbler/settings notify: restart cobblerd - name: ensure default_password_crypted is set in /etc/cobbler/settings - lineinfile: + ansible.builtin.lineinfile: line: "default_password_crypted: {{ cobbler_default_password_crypted }}" regexp: "^default_password_crypted:" dest: /etc/cobbler/settings notify: restart cobblerd - name: ensure pxe_just_once is enabled in /etc/cobbler/settings - lineinfile: + ansible.builtin.lineinfile: line: "pxe_just_once: {{ cobbler_pxe_just_once }}" regexp: "^pxe_just_once:" dest: /etc/cobbler/settings notify: restart cobblerd - name: ensure authn_configfile is set in /etc/cobbler/modules.conf for cobbler web - lineinfile: + ansible.builtin.lineinfile: line: "module = authn_configfile" regexp: "^module = authn" insertafter: "[authentication]" @@ -134,7 +134,7 @@ - restart apache - name: ensure authz_allowall is set in /etc/cobbler/modules.conf for cobbler web - lineinfile: + ansible.builtin.lineinfile: line: "module = authz_allowall" regexp: "^module = authz" insertafter: "[authorization]" @@ -152,14 +152,14 @@ - restart apache - name: ensure dhcp management is enabled in /etc/cobbler/settings - lineinfile: + ansible.builtin.lineinfile: line: "manage_dhcp: {{ cobbler_manage_dhcp }}" regexp: "^manage_dhcp:" dest: /etc/cobbler/settings notify: restart cobblerd - name: ensure dns management is enabled in /etc/cobbler/settings - lineinfile: + ansible.builtin.lineinfile: line: "manage_dns: {{ cobbler_manage_dns }}" regexp: "^manage_dns:" dest: /etc/cobbler/settings @@ -167,14 +167,14 @@ register: cobbler_dns_enabled - name: ensure /etc/secondary.conf exists with se type cobbler_var_lib_t - file: + ansible.builtin.file: state: touch setype: cobbler_var_lib_t path: /etc/secondary.conf when: cobbler_dns_enabled.changed - name: ensure dns forward zones are set in /etc/cobbler/settings - lineinfile: + ansible.builtin.lineinfile: line: "manage_forward_zones: {{ cobbler_manage_forward_zones }}" regexp: "^manage_forward_zones:" dest: /etc/cobbler/settings @@ -182,7 +182,7 @@ when: cobbler_manage_dns | bool - name: ensure dns reverse zones are set in /etc/cobbler/settings - lineinfile: + ansible.builtin.lineinfile: line: "manage_reverse_zones: {{ cobbler_manage_reverse_zones }}" regexp: "^manage_reverse_zones:" dest: /etc/cobbler/settings @@ -190,36 +190,36 @@ when: cobbler_manage_dns | bool - name: ensure di_dists directive is set in /etc/debmirror.conf - lineinfile: + ansible.builtin.lineinfile: line: "@di_dists=\"dists\";" regexp: "@di_dists=" dest: /etc/debmirror.conf - name: ensure di_archs directive is set in /etc/debmirror.conf - lineinfile: + ansible.builtin.lineinfile: line: "@di_archs=\"arches\";" regexp: "@di_archs=" dest: /etc/debmirror.conf - name: ensure cobbler profile folder exists in /var/www/cobbler/ks_mirror/ - file: + ansible.builtin.file: path: "/var/www/cobbler/ks_mirror/{{ item.name }}-{{ item.arch }}" state: directory with_items: "{{ cobbler_profiles }}" register: cobbler_profiles_folders - name: ensure folder for iso files exist - file: + ansible.builtin.file: path: "{{ cobbler_isos_path }}" state: directory - name: ensure iso mount path exists - file: + ansible.builtin.file: path: "{{ cobbler_isos_mount_path }}" state: directory - name: ensure iso files are present - get_url: + ansible.builtin.get_url: url: "{{ item.item.url }}" dest: "{{ cobbler_isos_path }}/{{ item.item.name }}.iso" 
with_items: "{{ cobbler_profiles_folders.results }}" @@ -262,7 +262,7 @@ with_items: "{{ cobbler_profiles_folders.results }}" - name: ensure isos are deleted after import - file: + ansible.builtin.file: path: "{{ cobbler_isos_path }}/{{ item.item.name }}.iso" state: absent when: cobbler_isos_delete == true and item.changed == true diff --git a/roles/dell-racadm-host/tasks/racadm-setup-FWupdates.yml b/roles/dell-racadm-host/tasks/racadm-setup-FWupdates.yml index 3d403d7a6..7ccf9fcc2 100644 --- a/roles/dell-racadm-host/tasks/racadm-setup-FWupdates.yml +++ b/roles/dell-racadm-host/tasks/racadm-setup-FWupdates.yml @@ -140,5 +140,5 @@ - debug: var=result_jobqueue # - name: End the play when there are no updates available -# meta: end_play +# ansible.builtin.meta: end_play # when: '"The server firmware is up to date and matches the catalog definitions" in catalog_report.stdout' diff --git a/roles/dell-racadm-host/tasks/racadm-setup-iDRAC-settings.yml b/roles/dell-racadm-host/tasks/racadm-setup-iDRAC-settings.yml index cf0aebae1..725ce5deb 100644 --- a/roles/dell-racadm-host/tasks/racadm-setup-iDRAC-settings.yml +++ b/roles/dell-racadm-host/tasks/racadm-setup-iDRAC-settings.yml @@ -49,7 +49,7 @@ - name: "Get idrac certificate serial number info" delegate_to: localhost become: no - command: "openssl x509 -in {{ cacert_certs_dir }}/{{ hostname_name_full }}.pem -noout -serial" + ansible.builtin.command: "openssl x509 -in {{ cacert_certs_dir }}/{{ hostname_name_full }}.pem -noout -serial" register: cacert_serial_result changed_when: false diff --git a/roles/dell-racadm-host/tasks/setup_disks.yml b/roles/dell-racadm-host/tasks/setup_disks.yml index ec5dc32e5..803f5a458 100644 --- a/roles/dell-racadm-host/tasks/setup_disks.yml +++ b/roles/dell-racadm-host/tasks/setup_disks.yml @@ -6,12 +6,12 @@ - xfsprogs - name: Check if RAID0 device exists - command: /usr/sbin/mdadm --detail /dev/md0 + ansible.builtin.command: /usr/sbin/mdadm --detail /dev/md0 register: md0_result ignore_errors: true - name: Create RAID0 device - command: mdadm --create --verbose {{ raid_device }} --chunk=256 --level=0 --name=data --raid-devices={{ ssd_devices|length }} {{ ssd_devices|join(' ') }} + ansible.builtin.command: mdadm --create --verbose {{ raid_device }} --chunk=256 --level=0 --name=data --raid-devices={{ ssd_devices|length }} {{ ssd_devices|join(' ') }} when: md0_result|failed - name: Create filesystem diff --git a/roles/deploy-cacerts/handlers/main.yml b/roles/deploy-cacerts/handlers/main.yml index c257cf2f8..e42a979a8 100644 --- a/roles/deploy-cacerts/handlers/main.yml +++ b/roles/deploy-cacerts/handlers/main.yml @@ -1,4 +1,4 @@ --- - name: reload systemctl - command: systemctl daemon-reload + ansible.builtin.command: systemctl daemon-reload diff --git a/roles/deploy-cacerts/tasks/fetch_certs.yml b/roles/deploy-cacerts/tasks/fetch_certs.yml index 52a5b0029..675a677fe 100644 --- a/roles/deploy-cacerts/tasks/fetch_certs.yml +++ b/roles/deploy-cacerts/tasks/fetch_certs.yml @@ -139,7 +139,7 @@ dest: "{{ __deploy_cacerts__trust_ca_cacert_dir }}/{{ deploy_cacerts__ca_root_cn }}.{{ deploy_cacerts__trust_ca_cert_extension }}" - name: "trust_cert | update CA trust: {{ __deploy_cacerts__trust_ca_update_trust_cmd }}" - command: '{{ __deploy_cacerts__trust_ca_update_trust_cmd }}' + ansible.builtin.command: '{{ __deploy_cacerts__trust_ca_update_trust_cmd }}' - name: "{{ __log_prefix_fetch }} Fetch host cert and key" when: deploy_cacerts__deploy_host_certs|d(True)|bool diff --git a/roles/deploy-cacerts/tasks/trust_cert.yml 
index a249b3fd3..989539d86 100644
--- a/roles/deploy-cacerts/tasks/trust_cert.yml
+++ b/roles/deploy-cacerts/tasks/trust_cert.yml
@@ -63,11 +63,11 @@
 ## ref: https://access.redhat.com/documentation/en-us/red_hat_enterprise_linux/8/html/security_hardening/using-shared-system-certificates_security-hardening
 ## ref: https://techjourney.net/update-add-ca-certificates-bundle-in-redhat-centos/
 - name: "trust_cert | update CA trust for newly added external certs"
-  command: "{{ __deploy_cacerts__trust_ca_update_trust_cmd }}"
+  ansible.builtin.command: "{{ __deploy_cacerts__trust_ca_update_trust_cmd }}"
 #  when: trust_ca_cacertinstalled is changed or deploy_cacerts__ca_force_distribute_nodes|bool

 #- name: convert to pkcs12
-#  command: openssl pkcs12 -export \
+#  ansible.builtin.command: openssl pkcs12 -export \
 #    -in {{ ca_path }}/{{ cert_node.domainName }}/{{ cert_node.commonName }}.crt \
 #    -inkey {{ ca_path }}/{{ cert_node.domainName}}/{{ cert_node.commonName }}.key \
 #    -out {{ ca_path }}/{{ cert_node.domainName }}/{{ cert_node.commonName }}.p12 \
@@ -78,7 +78,7 @@
 #- name: "trust_cert | Add service cert to keystore"
 #  when: deploy_cacerts__ca_java_keystore_enabled|bool
-#  command: |
+#  ansible.builtin.command: |
 #    keytool -importcert \
 #      -storepass {{ deploy_cacerts__ca_java_keystore_pass }} \
 #      -keystore {{ __deploy_cacerts__ca_java_keystore }} \
@@ -91,7 +91,7 @@
 #- name: "trust_cert | Add root ca root and host certs to keystore"
 #  when: deploy_cacerts__ca_java_keystore_enabled|bool
-#  command: |
+#  ansible.builtin.command: |
 #    keytool -importcert -v \
 #      -storepass {{ deploy_cacerts__ca_java_keystore_pass }} \
 #      -noprompt \
@@ -111,7 +111,7 @@
 ## ref: https://superuser.com/questions/881665/keytool-commands-to-replace-existing-ssl-certificate
 ## ref: https://stackoverflow.com/questions/48204014/how-to-delete-already-import-certificate-alias-by-keytool-command
 - name: "trust_cert | Remove old service cert from keystore"
-  command: |
+  ansible.builtin.command: |
     keytool -delete -v \
       -storepass {{ deploy_cacerts__ca_java_keystore_pass }} \
       -noprompt \
@@ -127,7 +127,7 @@
       verbosity: 1

 - name: "trust_cert | Add current service cert to keystore"
-  command: |
+  ansible.builtin.command: |
     keytool -importcert -v \
       -storepass {{ deploy_cacerts__ca_java_keystore_pass }} \
       -noprompt \
diff --git a/roles/deploy-cacerts/tasks/trust_external_certs.yml b/roles/deploy-cacerts/tasks/trust_external_certs.yml
index 5ea9e3b7d..ac15df22f 100644
--- a/roles/deploy-cacerts/tasks/trust_external_certs.yml
+++ b/roles/deploy-cacerts/tasks/trust_external_certs.yml
@@ -65,5 +65,5 @@
     loop_var: __site_cert_info

 - name: "{{ __log_prefix_trust_ext }} update CA trust for newly added external certs"
-  command: "{{ __deploy_cacerts__trust_ca_update_trust_cmd }}"
+  ansible.builtin.command: "{{ __deploy_cacerts__trust_ca_update_trust_cmd }}"
 #  when: trust_ca_cacertinstalled is changed or deploy_cacerts__ca_force_distribute_nodes|bool
diff --git a/roles/deploy-vm/handlers/main.yml b/roles/deploy-vm/handlers/main.yml
index a755ee1e1..6e3c4adfd 100644
--- a/roles/deploy-vm/handlers/main.yml
+++ b/roles/deploy-vm/handlers/main.yml
@@ -1,4 +1,4 @@
 ---
 - name: 'sleep'
-  pause:
+  ansible.builtin.pause:
     seconds: 5
diff --git a/roles/deploy-vm/tasks/config-vmware-vm-linux.yml b/roles/deploy-vm/tasks/config-vmware-vm-linux.yml
index e3ba0e86a..160ab36d6 100644
--- a/roles/deploy-vm/tasks/config-vmware-vm-linux.yml
+++ b/roles/deploy-vm/tasks/config-vmware-vm-linux.yml
@@ -98,20 +98,20 @@
     state: poweredoff

 - name: "config-vm-vmware-linux[{{ __vm_info.name }}] | Unregister VM"
-  command: "govc vm.unregister {{ __vm_info.name }}"
+  ansible.builtin.command: "govc vm.unregister {{ __vm_info.name }}"
   environment: "{{ deploy_vm_govc_environment }}"

 ## ref: https://opensourcelibs.com/lib/govc
 - name: "config-vm-vmware-linux[{{ __vm_info.name }}] | Create directory {{ __vm_info.datastore_folder }} on VM datastore {{ __vm_info.datastore }}"
-  command: "govc datastore.mkdir -p -ds={{ __vm_info.datastore }} {{ __vm_info.datastore_folder }}"
+  ansible.builtin.command: "govc datastore.mkdir -p -ds={{ __vm_info.datastore }} {{ __vm_info.datastore_folder }}"
   environment: "{{ deploy_vm_govc_environment }}"

 - name: "config-vm-vmware-linux[{{ __vm_info.name }}] | Move VM datastore folder to {{ __vm_info.datastore_folder }}"
-  command: "govc datastore.mv -f -ds={{ __vm_info.datastore }} {{ __vm_info.name }} {{ __vm_info.datastore_folder }}/{{ __vm_info.name }}"
+  ansible.builtin.command: "govc datastore.mv -f -ds={{ __vm_info.datastore }} {{ __vm_info.name }} {{ __vm_info.datastore_folder }}/{{ __vm_info.name }}"
   environment: "{{ deploy_vm_govc_environment }}"

 - name: "config-vm-vmware-linux[{{ __vm_info.name }}] | Register VM"
-  command: "govc vm.register -template=false -ds={{ __vm_info.datastore }} -folder={{ __vm_info.folder }} -host={{ __vm_info.host }} {{ __vm_info.datastore_folder }}/{{ __vm_info.name }}/{{ __vm_info.name }}.vmx"
+  ansible.builtin.command: "govc vm.register -template=false -ds={{ __vm_info.datastore }} -folder={{ __vm_info.folder }} -host={{ __vm_info.host }} {{ __vm_info.datastore_folder }}/{{ __vm_info.name }}/{{ __vm_info.name }}.vmx"
   environment: "{{ deploy_vm_govc_environment }}"

 ## ref: https://docs.openshift.com/container-platform/3.6/install_config/configuring_vsphere.html#vsphere-enabling
@@ -119,7 +119,7 @@
 ## ref: https://github.com/ansible/ansible/commit/d23da2e4943642b007b4e46971244bd66917d4ec
 ## [root@esx02:~] cat /vmfs/volumes/54453819-a19ddd4c-aa71-001ec956143b/vm/vm02/vm02.vmx
 - name: "config-vm-vmware-linux[{{ __vm_info.name }}] | Update VM UUID action to keep"
-  command: "govc vm.change -e='uuid.action=keep' -vm={{ __vm_info.name }}"
+  ansible.builtin.command: "govc vm.change -e='uuid.action=keep' -vm={{ __vm_info.name }}"
   environment: "{{ deploy_vm_govc_environment }}"

 # ## ref: https://www.suse.com/support/kb/doc/?id=000016951
@@ -191,7 +191,7 @@
   block:
     - name: "config-vm-vmware-linux[{{ __vm_info.name }}] | Create working directory on Ansible Controller"
-      file:
+      ansible.builtin.file:
         path: "{{ deploy_vm_workdir }}"
         state: directory
diff --git a/roles/deploy-vm/tasks/deploy-proxmox-vm.yml b/roles/deploy-vm/tasks/deploy-proxmox-vm.yml
index 0a8403ef8..e1f7dfde1 100644
--- a/roles/deploy-vm/tasks/deploy-proxmox-vm.yml
+++ b/roles/deploy-vm/tasks/deploy-proxmox-vm.yml
@@ -36,7 +36,7 @@
     register: 'created_cts_pve'

-  - meta: 'flush_handlers'
+  - ansible.builtin.meta: 'flush_handlers'
     when: 'created_cts_pve.changed'

   - name: 'start containers'
@@ -52,5 +52,5 @@
       - 'sleep'
     #when: 'item.changed'

-  - meta: 'flush_handlers'
+  - ansible.builtin.meta: 'flush_handlers'
     when: 'created_cts_pve.changed'
diff --git a/roles/deploy-vm/tasks/deploy-vmware-appliance.yml b/roles/deploy-vm/tasks/deploy-vmware-appliance.yml
index 0226751a5..63eec51fb 100644
--- a/roles/deploy-vm/tasks/deploy-vmware-appliance.yml
+++ b/roles/deploy-vm/tasks/deploy-vmware-appliance.yml
@@ -106,7 +106,7 @@
 ## ref: https://itspyworld.blogspot.com/2020/07/ansible-deploy-multiple-vm-from.html
 ## ref: https://blog.crisp.se/2018/01/27/maxwenzin/how-to-run-ansible-tasks-in-parallel

 - name: Wait for VM creation to finish
-  async_status:
+  ansible.builtin.async_status:
     jid: "{{ item.ansible_job_id }}"
   loop: "{{ __deploy_vm_job_start.results }}"
   loop_control:
diff --git a/roles/deploy-vm/tasks/deploy-vmware-vm.yml b/roles/deploy-vm/tasks/deploy-vmware-vm.yml
index f33f01139..dbcd97be2 100644
--- a/roles/deploy-vm/tasks/deploy-vmware-vm.yml
+++ b/roles/deploy-vm/tasks/deploy-vmware-vm.yml
@@ -143,7 +143,7 @@
 ## ref: https://itspyworld.blogspot.com/2020/07/ansible-deploy-multiple-vm-from.html
 ## ref: https://blog.crisp.se/2018/01/27/maxwenzin/how-to-run-ansible-tasks-in-parallel

 - name: Wait for VM creation to finish
-  async_status:
+  ansible.builtin.async_status:
     jid: "{{ item.ansible_job_id }}"
   loop: "{{ __deploy_vm_job_start.results }}"
   loop_control:
diff --git a/roles/deploy-vm/tasks/main.yml b/roles/deploy-vm/tasks/main.yml
index a39873513..c89629f9d 100644
--- a/roles/deploy-vm/tasks/main.yml
+++ b/roles/deploy-vm/tasks/main.yml
@@ -2,7 +2,7 @@

 - name: Install common pip libs
   when: (deploy_vm_python_pip_depends is defined and not(deploy_vm_python_pip_depends is none))
-  pip:
+  ansible.builtin.pip:
     name: "{{ deploy_vm_python_pip_depends }}"
     state: present
     extra_args: --user
diff --git a/roles/deploy-vsphere-vm/common/skip_test_case.yml b/roles/deploy-vsphere-vm/common/skip_test_case.yml
index 6bfcb3305..45ac829f9 100644
--- a/roles/deploy-vsphere-vm/common/skip_test_case.yml
+++ b/roles/deploy-vsphere-vm/common/skip_test_case.yml
@@ -25,4 +25,4 @@
     msg: "{{ skip_msg }}"
   when: skip_reason != "Blocked"

-- meta: end_host
+- ansible.builtin.meta: end_host
diff --git a/roles/deploy-vsphere-vm/linux/gosv_testcase_list.yml b/roles/deploy-vsphere-vm/linux/gosv_testcase_list.yml
index 976c6a25c..3e7422012 100644
--- a/roles/deploy-vsphere-vm/linux/gosv_testcase_list.yml
+++ b/roles/deploy-vsphere-vm/linux/gosv_testcase_list.yml
@@ -1,31 +1,31 @@
 ---
-- import_playbook: deploy_vm/deploy_vm.yml
-- import_playbook: check_inbox_driver/check_inbox_driver.yml
-- import_playbook: open_vm_tools/ovt_verify_install.yml
-- import_playbook: open_vm_tools/ovt_verify_status.yml
-- import_playbook: vgauth_check_service/vgauth_check_service.yml
-- import_playbook: check_ip_address/check_ip_address.yml
-- import_playbook: check_os_fullname/check_os_fullname.yml
-- import_playbook: stat_balloon/stat_balloon.yml
-- import_playbook: stat_hosttime/stat_hosttime.yml
-- import_playbook: device_list/device_list.yml
-- import_playbook: check_quiesce_snapshot_custom_script/check_quiesce_snapshot_custom_script.yml
-- import_playbook: memory_hot_add_basic/memory_hot_add_basic.yml
-- import_playbook: cpu_hot_add_basic/cpu_hot_add_basic.yml
-- import_playbook: cpu_multicores_per_socket/cpu_multicores_per_socket.yml
-- import_playbook: check_efi_firmware/check_efi_firmware.yml
-- import_playbook: secureboot_enable_disable/secureboot_enable_disable.yml
-- import_playbook: network_device_ops/e1000e_network_device_ops.yml
-- import_playbook: network_device_ops/vmxnet3_network_device_ops.yml
-- import_playbook: guest_customization/gosc_perl_dhcp.yml
-- import_playbook: guest_customization/gosc_perl_staticip.yml
-- import_playbook: guest_customization/gosc_cloudinit_dhcp.yml
-- import_playbook: guest_customization/gosc_cloudinit_staticip.yml
-- import_playbook: vhba_hot_add_remove/paravirtual_vhba_device_ops.yml
-- import_playbook: vhba_hot_add_remove/lsilogic_vhba_device_ops.yml
-- import_playbook: vhba_hot_add_remove/lsilogicsas_vhba_device_ops.yml
-- import_playbook: vhba_hot_add_remove/sata_vhba_device_ops.yml
-- import_playbook: vhba_hot_add_remove/nvme_vhba_device_ops.yml
-- import_playbook: nvdimm_cold_add_remove/nvdimm_cold_add_remove.yml
-- import_playbook: open_vm_tools/ovt_verify_uninstall.yml
+- ansible.builtin.import_playbook: deploy_vm/deploy_vm.yml
+- ansible.builtin.import_playbook: check_inbox_driver/check_inbox_driver.yml
+- ansible.builtin.import_playbook: open_vm_tools/ovt_verify_install.yml
+- ansible.builtin.import_playbook: open_vm_tools/ovt_verify_status.yml
+- ansible.builtin.import_playbook: vgauth_check_service/vgauth_check_service.yml
+- ansible.builtin.import_playbook: check_ip_address/check_ip_address.yml
+- ansible.builtin.import_playbook: check_os_fullname/check_os_fullname.yml
+- ansible.builtin.import_playbook: stat_balloon/stat_balloon.yml
+- ansible.builtin.import_playbook: stat_hosttime/stat_hosttime.yml
+- ansible.builtin.import_playbook: device_list/device_list.yml
+- ansible.builtin.import_playbook: check_quiesce_snapshot_custom_script/check_quiesce_snapshot_custom_script.yml
+- ansible.builtin.import_playbook: memory_hot_add_basic/memory_hot_add_basic.yml
+- ansible.builtin.import_playbook: cpu_hot_add_basic/cpu_hot_add_basic.yml
+- ansible.builtin.import_playbook: cpu_multicores_per_socket/cpu_multicores_per_socket.yml
+- ansible.builtin.import_playbook: check_efi_firmware/check_efi_firmware.yml
+- ansible.builtin.import_playbook: secureboot_enable_disable/secureboot_enable_disable.yml
+- ansible.builtin.import_playbook: network_device_ops/e1000e_network_device_ops.yml
+- ansible.builtin.import_playbook: network_device_ops/vmxnet3_network_device_ops.yml
+- ansible.builtin.import_playbook: guest_customization/gosc_perl_dhcp.yml
+- ansible.builtin.import_playbook: guest_customization/gosc_perl_staticip.yml
+- ansible.builtin.import_playbook: guest_customization/gosc_cloudinit_dhcp.yml
+- ansible.builtin.import_playbook: guest_customization/gosc_cloudinit_staticip.yml
+- ansible.builtin.import_playbook: vhba_hot_add_remove/paravirtual_vhba_device_ops.yml
+- ansible.builtin.import_playbook: vhba_hot_add_remove/lsilogic_vhba_device_ops.yml
+- ansible.builtin.import_playbook: vhba_hot_add_remove/lsilogicsas_vhba_device_ops.yml
+- ansible.builtin.import_playbook: vhba_hot_add_remove/sata_vhba_device_ops.yml
+- ansible.builtin.import_playbook: vhba_hot_add_remove/nvme_vhba_device_ops.yml
+- ansible.builtin.import_playbook: nvdimm_cold_add_remove/nvdimm_cold_add_remove.yml
+- ansible.builtin.import_playbook: open_vm_tools/ovt_verify_uninstall.yml
 ...
diff --git a/roles/deploy-vsphere-vm/main.yml b/roles/deploy-vsphere-vm/main.yml
index 39e633758..f26c16b46 100644
--- a/roles/deploy-vsphere-vm/main.yml
+++ b/roles/deploy-vsphere-vm/main.yml
@@ -19,8 +19,8 @@
     dir_path: "{{ local_cache }}"
     dir_mode: "0777"
 # Prepare testing environment
-- import_playbook: env_setup/env_setup.yml
+- ansible.builtin.import_playbook: env_setup/env_setup.yml
 # Execute test case one by one
-- import_playbook: "{{ testing_testcase_file | default('linux/gosv_testcase_list.yml') }}"
+- ansible.builtin.import_playbook: "{{ testing_testcase_file | default('linux/gosv_testcase_list.yml') }}"
 # Cleanup testing environment
-- import_playbook: env_setup/env_cleanup.yml
+- ansible.builtin.import_playbook: env_setup/env_cleanup.yml
diff --git a/roles/deploy-vsphere-vm/windows/gosv_testcase_list.yml b/roles/deploy-vsphere-vm/windows/gosv_testcase_list.yml
index a7408fccb..652615950 100644
--- a/roles/deploy-vsphere-vm/windows/gosv_testcase_list.yml
+++ b/roles/deploy-vsphere-vm/windows/gosv_testcase_list.yml
@@ -1,12 +1,12 @@
 ---
-- import_playbook: deploy_vm/deploy_vm.yml
-- import_playbook: check_inbox_driver/check_inbox_driver.yml
-- import_playbook: secureboot_enable_disable/secureboot_enable_disable.yml
-- import_playbook: wintools_complete_install_verify/wintools_complete_install_verify.yml
-- import_playbook: check_efi_firmware/check_efi_firmware.yml
-- import_playbook: check_ip_address/check_ip_address.yml
-- import_playbook: check_os_fullname/check_os_fullname.yml
-- import_playbook: mouse_driver_vmtools/mouse_driver_vmtools.yml
+- ansible.builtin.import_playbook: deploy_vm/deploy_vm.yml
+- ansible.builtin.import_playbook: check_inbox_driver/check_inbox_driver.yml
+- ansible.builtin.import_playbook: secureboot_enable_disable/secureboot_enable_disable.yml
+- ansible.builtin.import_playbook: wintools_complete_install_verify/wintools_complete_install_verify.yml
+- ansible.builtin.import_playbook: check_efi_firmware/check_efi_firmware.yml
+- ansible.builtin.import_playbook: check_ip_address/check_ip_address.yml
+- ansible.builtin.import_playbook: check_os_fullname/check_os_fullname.yml
+- ansible.builtin.import_playbook: mouse_driver_vmtools/mouse_driver_vmtools.yml
 - import_playbook: vgauth_check_service/vgauth_check_service.yml
 - import_playbook: stat_balloon/stat_balloon.yml
 - import_playbook: vhba_hot_add_remove/paravirtual_vhba_device_ops.yml
diff --git a/roles/docker-stack/tasks/setup-service-configs.yml b/roles/docker-stack/tasks/setup-service-configs.yml
index c475252c7..1de40292a 100644
--- a/roles/docker-stack/tasks/setup-service-configs.yml
+++ b/roles/docker-stack/tasks/setup-service-configs.yml
@@ -68,7 +68,8 @@
     driver: "{{ item.value.driver | d(omit) }}"
 #    scope: "{{ item.value.scope | d('swarm' if item.value.attachable|d(False)|bool else omit) }}"
     scope: "{{ item.value.scope
-      | d('swarm' if (item.value.attachable|d(False)|bool or item.value.driver=='overlay') else omit) }}"
+      | d('swarm' if (item.value.attachable|d(False)|bool or item.value.driver in ['overlay', 'bridge'])
+      else 'local') }}"
     attachable: "{{ item.value.attachable | d(omit) }}"
 #    internal: "{{ (not item.value.external) | d(omit) }}"
   with_dict: "{{ __docker_stack__networks }}"
diff --git a/roles/docker-stack/templates/ansible_template_ui/ansible-template-ui.env.j2 b/roles/docker-stack/templates/ansible_template_ui/ansible-template-ui.env.j2
new file mode 100644
index 000000000..afc8e5a3e
--- /dev/null
+++ b/roles/docker-stack/templates/ansible_template_ui/ansible-template-ui.env.j2
@@ -0,0 +1,15 @@
+{{ ansible_managed | comment }}
+
+{% if __docker_stack__ansibletemplateui__path_prefix is defined %}
+SCRIPT_NAME="{{ __docker_stack__ansibletemplateui__path_prefix }}"
+{% endif %}
+
+################################
+## vaulted credentials
+DOCKER_REGISTRY_USERNAME=dksec://docker_registry_username
+DOCKER_REGISTRY_PASSWORD=dksec://docker_registry_password
+
+LOG_LEVEL=DEBUG
+{% if __docker_stack__ansibletemplateui__ansible_ee_image is defined %}
+DOCKER_ANSIBLE_EE_IMAGE={{ __docker_stack__ansibletemplateui__ansible_ee_image }}
+{% endif %}
diff --git a/roles/docker-stack/vars/app-services/common/docker_stack_ansibletemplateui.yml b/roles/docker-stack/vars/app-services/common/docker_stack_ansibletemplateui.yml
index 0a5410aa3..7701d006b 100644
--- a/roles/docker-stack/vars/app-services/common/docker_stack_ansibletemplateui.yml
+++ b/roles/docker-stack/vars/app-services/common/docker_stack_ansibletemplateui.yml
@@ -1,5 +1,11 @@
 ---
+__docker_stack__ansibletemplateui__stack_dir: "{{ docker_stack__dir | d('/home/user/docker-dirs') }}"
+__docker_stack__ansibletemplateui__base_dir_default: "{{
+  __docker_stack__ansibletemplateui__stack_dir }}/ansible_template_ui"
+__docker_stack__ansibletemplateui__base_dir: "{{ docker_stack__ansibletemplateui__dir
+  | d(__docker_stack__ansibletemplateui__base_dir_default) }}"
+
 #__docker_stack__ansibletemplateui__image_default: "lj020326/ansible-template-ui:devel"
 __docker_stack__ansibletemplateui__image_default: "{{ docker_stack__registry_endpoint }}/ansible-template-ui:latest"
 __docker_stack__ansibletemplateui__image: "{{ docker_stack__ansibletemplateui__image
@@ -9,8 +15,34 @@ __docker_stack__ansibletemplateui__http_port: "{{ docker_stack__ansibletemplateu
 __docker_stack__ansibletemplateui__traefik_labels: "{{ docker_stack__ansibletemplateui__traefik_labels
   | d(__docker_stack__ansibletemplateui__traefik_labels_default) }}"

+#__docker_stack__ansibletemplateui__ansible_ee_image_default: lj020326/ansible-execution-env
+#__docker_stack__ansibletemplateui__ansible_ee_image_default: media.johnson.int:5000/ansible-execution-env:latest
+__docker_stack__ansibletemplateui__ansible_ee_image_default: "{{
+  docker_stack__registry_endpoint }}/ansible-execution-env"
+__docker_stack__ansibletemplateui__ansible_ee_image: "{{
+  docker_stack__ansibletemplateui__docker_ansible_ee_image
+  | d(__docker_stack__ansibletemplateui__ansible_ee_image_default) }}"
+
+## docker registry
+__docker_stack__ansibletemplateui__cred_docker_registry_username: "{{
+  docker_stack__ansibletemplateui__cred_docker_registry_admin_username | d('username') }}"
+
+__docker_stack__ansibletemplateui__cred_docker_registry_password: "{{
+  docker_stack__ansibletemplateui__cred_docker_registry_admin_password | d('password') }}"
+
+__docker_stack__ansibletemplateui__secrets:
+  - name: docker_registry_username
+    value: "{{ __docker_stack__ansibletemplateui__cred_docker_registry_username }}"
+  - name: docker_registry_password
+    value: "{{ __docker_stack__ansibletemplateui__cred_docker_registry_password }}"
+
 __docker_stack__ansibletemplateui__config_dirs:
-  - { path: "{{ docker_stack__dir }}/ansibletemplateui" }
+  - path: "{{ __docker_stack__ansibletemplateui__base_dir }}"
+  - path: "{{ __docker_stack__ansibletemplateui__base_dir }}/data"
+
+__docker_stack__ansibletemplateui__config_tpls:
+  - src: 'ansible_template_ui/ansible-template-ui.env.j2'
+    dest: "{{ __docker_stack__ansibletemplateui__base_dir }}/ansible-template-ui.env"

 __docker_stack__ansibletemplateui__firewalld_ports:
   - "{{ docker_stack__ansibletemplateui_http_port }}/tcp"
@@ -20,13 +52,17 @@ __docker_stack__ansibletemplateui__services:
   ansible-template-ui:
     image: "{{ __docker_stack__ansibletemplateui__image }}"
     restart: "unless-stopped"
-    environment: "{{ __docker_stack__ansibletemplateui__environment | d(omit) }}"
+    env_file:
+      - ansible_template_ui/ansible-template-ui.env
+    secrets:
+      - docker_registry_username
+      - docker_registry_password
 #    restart: always
     networks:
       - "{{ docker_stack__traefik_proxy_network }}"
     volumes:
       - "/var/run/docker.sock:/var/run/docker.sock:ro"
-      - "{{ docker_stack__dir }}/ansibletemplateui:/data"
+      - "{{ __docker_stack__ansibletemplateui__base_dir }}/data:/data"
     ports:
       - "{{ __docker_stack__ansibletemplateui__http_port }}:8080"
     labels: "{{ __docker_stack__ansibletemplateui__traefik_labels }}"
@@ -44,6 +80,7 @@ docker_stack__appspec__ansibletemplateui:
     dirs: "{{ __docker_stack__ansibletemplateui__config_dirs | d([]) }}"
     files: "{{ __docker_stack__ansibletemplateui__config_files | d([]) }}"
     templates: "{{ __docker_stack__ansibletemplateui__config_tpls | d([]) }}"
+    secrets: "{{ __docker_stack__ansibletemplateui__secrets | d([]) }}"
     firewalld_services: "{{ __docker_stack__ansibletemplateui__firewalld_services | d([]) }}"
     firewalld_ports: "{{ __docker_stack__ansibletemplateui__firewalld_ports | d([]) }}"
     networks: "{{ __docker_stack__ansibletemplateui__networks | d({}) }}"
diff --git a/roles/docker-stack/vars/app-services/common/docker_stack_base.yml b/roles/docker-stack/vars/app-services/common/docker_stack_base.yml
index a4a609631..b5256dd83 100644
--- a/roles/docker-stack/vars/app-services/common/docker_stack_base.yml
+++ b/roles/docker-stack/vars/app-services/common/docker_stack_base.yml
@@ -142,10 +142,10 @@ __docker_stack__base__networks_default:
 #    attachable: true
   socket_proxy:
-    driver: bridge
-    external: true
-#    driver: overlay
-#    attachable: true
+    attachable: true
+    driver: overlay
+#    driver: bridge
+#    external: true

 #__docker_stack__traefik__networks: "{{ __docker_stack__base__networks_default | combine( { docker_stack_proxy_network: { 'external': true } } ) }}"
 __docker_stack__base__networks: "{{ (docker_stack__base_networks | d(__docker_stack__base__networks_default))
diff --git a/roles/docker-stack/vars/app-services/common/docker_stack_jenkins_jcac.yml b/roles/docker-stack/vars/app-services/common/docker_stack_jenkins_jcac.yml
index 4121cbb9e..18d92df56 100644
--- a/roles/docker-stack/vars/app-services/common/docker_stack_jenkins_jcac.yml
+++ b/roles/docker-stack/vars/app-services/common/docker_stack_jenkins_jcac.yml
@@ -1,6 +1,7 @@
 ---
-__docker_stack__jenkins_jcac__environment: "{{ docker_stack__jenkins_jcac__environment | d(docker_stack__environment) }}"
+__docker_stack__jenkins_jcac__environment: "{{ docker_stack__jenkins_jcac__environment
+  | d(docker_stack__environment) }}"

 #__docker_stack__jenkins_jcac__ansible_tool_python_interpreter_default: "python3"
 #__docker_stack__jenkins_jcac__ansible_tool_python_interpreter_default: "python3.10"
@@ -11,7 +12,8 @@ __docker_stack__jenkins_jcac__ansible_tool_python_interpreter: "{{
 __docker_stack__jenkins_jcac__stack_dir: "{{ docker_stack__dir | d('/home/user/docker-dirs') }}"
 __docker_stack__jenkins_jcac__base_dir_default: "{{ __docker_stack__jenkins_jcac__stack_dir }}/jenkins_jcac"
-__docker_stack__jenkins_jcac__base_dir: "{{ docker_stack__jenkins_jcac__dir | d(__docker_stack__jenkins_jcac__base_dir_default) }}"
+__docker_stack__jenkins_jcac__base_dir: "{{ docker_stack__jenkins_jcac__dir
+  | d(__docker_stack__jenkins_jcac__base_dir_default) }}"

 __docker_stack__jenkins_jcac__traefik_host_default: "jenkins.example.int"
 __docker_stack__jenkins_jcac__host: "{{ docker_stack__jenkins_jcac__host
@@ -33,28 +35,37 @@ __docker_stack__jenkins_jcac__agent_port: "{{ docker_stack__jenkins_jcac__agent_
 __docker_stack__jenkins_jcac__agent_tcp_port: "{{ docker_stack__jenkins_jcac__tcp_port | d('9000') }}"
 __docker_stack__jenkins_jcac__agent_ssh_port: "{{ docker_stack__jenkins_jcac__ssh_port | d('4444') }}"

-__docker_stack__jenkins_jcac__mgr_pwd_secret: "{{ docker_stack__jenkins_jcac__mgr_pwd_secret | d('00000000000000000000000') }}"
+__docker_stack__jenkins_jcac__mgr_pwd_secret: "{{ docker_stack__jenkins_jcac__mgr_pwd_secret
+  | d('00000000000000000000000') }}"

-__docker_stack__jenkins_jcac__agent_username: "{{ docker_stack__jenkins_jcac__agent_username | d('00000000000000000000000') }}"
-__docker_stack__jenkins_jcac__agent_password: "{{ docker_stack__jenkins_jcac__agent_password | d('00000000000000000000000') }}"
+__docker_stack__jenkins_jcac__agent_username: "{{ docker_stack__jenkins_jcac__agent_username
+  | d('00000000000000000000000') }}"
+__docker_stack__jenkins_jcac__agent_password: "{{ docker_stack__jenkins_jcac__agent_password
+  | d('00000000000000000000000') }}"

 __docker_stack__jenkins_jcac__agent_swarm_mode: yes

 ## ref: https://www.jenkins.io/blog/2020/05/11/docker-windows-agents/
 #__docker_stack__jenkins_jcac__image_default: "jenkins/jenkins:lts"
 __docker_stack__jenkins_jcac__image_default: "{{ docker_stack__registry_endpoint }}/docker-jenkins-jcac:latest"
-__docker_stack__jenkins_jcac__image: "{{ docker_stack__jenkins_jcac__image | d(__docker_stack__jenkins_jcac__image_default) }}"
+__docker_stack__jenkins_jcac__image: "{{ docker_stack__jenkins_jcac__image
+  | d(__docker_stack__jenkins_jcac__image_default) }}"

-__docker_stack__jenkins_jcac__agent_image_default: "{{ docker_stack__registry_endpoint }}/jenkins-docker-cicd-agent:latest"
-__docker_stack__jenkins_jcac__agent_image: "{{ docker_stack__jenkins_jcac__agent_image | d(__docker_stack__jenkins_jcac__agent_image_default) }}"
+__docker_stack__jenkins_jcac__agent_image_default: "{{
+  docker_stack__registry_endpoint }}/jenkins-docker-cicd-agent:latest"
+__docker_stack__jenkins_jcac__agent_image: "{{ docker_stack__jenkins_jcac__agent_image
+  | d(__docker_stack__jenkins_jcac__agent_image_default) }}"

-__docker_stack__jenkins_jcac__swarm_agent_image_default: "{{ docker_stack__registry_endpoint }}/jenkins-swarm-agent:latest"
+__docker_stack__jenkins_jcac__swarm_agent_image_default: "{{
+  docker_stack__registry_endpoint }}/jenkins-swarm-agent:latest"
 __docker_stack__jenkins_jcac__swarm_agent_image: "{{ docker_stack__jenkins_swarm_agent_image
   | d(__docker_stack__jenkins_jcac__swarm_agent_image_default) }}"

 __docker_stack__jenkins_jcac__ldap_host: "{{ docker_stack__jenkins_jcac__ldap_host | d(docker_stack__ldap_host)}}"
-__docker_stack__jenkins_jcac__ldap_uri: "{{ docker_stack__jenkins_jcac__ldap_uri | d(docker_stack__ldap_uri) | d(docker_stack__ldap_host) }}"
-__docker_stack__jenkins_jcac__ldap_base_dn: "{{ docker_stack__jenkins_jcac__ldap_base_dn | d(docker_stack__ldap_base_dn)}}"
+__docker_stack__jenkins_jcac__ldap_uri: "{{ docker_stack__jenkins_jcac__ldap_uri
+  | d(docker_stack__ldap_uri) | d(docker_stack__ldap_host) }}"
+__docker_stack__jenkins_jcac__ldap_base_dn: "{{ docker_stack__jenkins_jcac__ldap_base_dn
+  | d(docker_stack__ldap_base_dn)}}"

 __docker_stack__jenkins_jcac__ldap_bind_user_dn_default: "cn=readonly,{{
   __docker_stack__jenkins_jcac__ldap_base_dn | d(docker_stack__ldap_user_base_dn) }}"
@@ -69,7 +80,8 @@ __docker_stack__jenkins_jcac__ldap_bind_user_password: "{{ docker_stack__jenkins
 #__docker_stack__jenkins_jcac__data_dir_default: /export/data/jenkins/osimages
 #__docker_stack__jenkins_jcac__data_dir_default: /export/data/jenkins
 __docker_stack__jenkins_jcac__data_dir_default: /data/datacenter/jenkins
-__docker_stack__jenkins_jcac__data_dir: "{{ docker_stack__jenkins_data_dir | d(__docker_stack__jenkins_jcac__data_dir_default) }}"
+__docker_stack__jenkins_jcac__data_dir: "{{ docker_stack__jenkins_data_dir
+  | d(__docker_stack__jenkins_jcac__data_dir_default) }}"

 __docker_stack__jenkins_jcac__pipeline_lib_repo_default: "https://gitea.example.int/infra/pipeline-automation-lib.git"
 __docker_stack__jenkins_jcac__pipeline_lib_repo: "{{ docker_stack__jenkins_jcac__pipeline_lib_repo
@@ -127,43 +139,64 @@ __docker_stack__jenkins__ansible_venv_libs: "{{ docker_stack__jenkins__ansible_v
 #########
 ## Credential vars
-__docker_stack__jenkins_jcac__cred_ansible_vault_password: "{{ docker_stack__jenkins_jcac__cred_ansible_vault_password | d('CHANGEME123') }}"
+__docker_stack__jenkins_jcac__cred_ansible_vault_password: "{{
+  docker_stack__jenkins_jcac__cred_ansible_vault_password | d('CHANGEME123') }}"

-__docker_stack__jenkins_jcac__cred_bitbucket_ssh_username: "{{ docker_stack__jenkins_jcac__cred_bitbucket_ssh_username | d('CHANGEME123') }}"
-__docker_stack__jenkins_jcac__cred_bitbucket_ssh_private_key: "{{ docker_stack__jenkins_jcac__cred_bitbucket_ssh_private_key | d('CHANGEME123') }}"
+__docker_stack__jenkins_jcac__cred_bitbucket_ssh_username: "{{
+  docker_stack__jenkins_jcac__cred_bitbucket_ssh_username | d('CHANGEME123') }}"
+__docker_stack__jenkins_jcac__cred_bitbucket_ssh_private_key: "{{
+  docker_stack__jenkins_jcac__cred_bitbucket_ssh_private_key | d('CHANGEME123') }}"

-__docker_stack__jenkins_jcac__cred_ansible_ssh_key: "{{ docker_stack__jenkins_jcac__cred_ansible_ssh_key | d('CHANGEME123') }}"
+__docker_stack__jenkins_jcac__cred_ansible_ssh_key: "{{
+  docker_stack__jenkins_jcac__cred_ansible_ssh_key | d('CHANGEME123') }}"

-__docker_stack__jenkins_jcac__cred_ansible_ssh_username: "{{ docker_stack__jenkins_jcac__cred_ansible_ssh_username | d('CHANGEME123') }}"
-__docker_stack__jenkins_jcac__cred_ansible_ssh_password: "{{ docker_stack__jenkins_jcac__cred_ansible_ssh_password | d('CHANGEME123') }}"
+__docker_stack__jenkins_jcac__cred_ansible_ssh_username: "{{
+  docker_stack__jenkins_jcac__cred_ansible_ssh_username | d('CHANGEME123') }}"
+__docker_stack__jenkins_jcac__cred_ansible_ssh_password: "{{
+  docker_stack__jenkins_jcac__cred_ansible_ssh_password | d('CHANGEME123') }}"

 ## docker registry
-__docker_stack__jenkins_jcac__cred_docker_registry_admin_username: "{{ docker_stack__jenkins_jcac__cred_docker_registry_admin_username | d('admin') }}"
-__docker_stack__jenkins_jcac__cred_docker_registry_admin_password: "{{ docker_stack__jenkins_jcac__cred_docker_registry_admin_password | d('CHANGEME123') }}"
-
-__docker_stack__jenkins_jcac__cred_jenkins_admin_user_username: "{{ docker_stack__jenkins_jcac__cred_jenkins_admin_user_username | d('admin') }}"
-__docker_stack__jenkins_jcac__cred_jenkins_admin_user_password: "{{ docker_stack__jenkins_jcac__cred_jenkins_admin_user_password | d('CHANGEME123') }}"
-__docker_stack__jenkins_jcac__cred_jenkins_git_user_password: "{{ docker_stack__jenkins_jcac__cred_jenkins_git_user_password | d('CHANGEME123') }}"
-
-__docker_stack__jenkins_jcac__cred_vsphere_username: "{{ docker_stack__jenkins_jcac__cred_vsphere_username | d('CHANGEME123') }}"
"{{ docker_stack__jenkins_jcac__cred_vsphere_username | d('CHANGEME123') }}" -__docker_stack__jenkins_jcac__cred_vsphere_password: "{{ docker_stack__jenkins_jcac__cred_vsphere_password | d('CHANGEME123') }}" - -__docker_stack__jenkins_jcac__cred_esxi_password: "{{ docker_stack__jenkins_jcac__cred_esxi_password | d('CHANGEME123') }}" +__docker_stack__jenkins_jcac__cred_docker_registry_admin_username: "{{ + docker_stack__jenkins_jcac__cred_docker_registry_admin_username | d('admin') }}" +__docker_stack__jenkins_jcac__cred_docker_registry_admin_password: "{{ + docker_stack__jenkins_jcac__cred_docker_registry_admin_password | d('CHANGEME123') }}" + +__docker_stack__jenkins_jcac__cred_jenkins_admin_user_username: "{{ + docker_stack__jenkins_jcac__cred_jenkins_admin_user_username | d('admin') }}" +__docker_stack__jenkins_jcac__cred_jenkins_admin_user_password: "{{ + docker_stack__jenkins_jcac__cred_jenkins_admin_user_password | d('CHANGEME123') }}" +__docker_stack__jenkins_jcac__cred_jenkins_git_user_password: "{{ + docker_stack__jenkins_jcac__cred_jenkins_git_user_password | d('CHANGEME123') }}" + +__docker_stack__jenkins_jcac__cred_vsphere_username: "{{ + docker_stack__jenkins_jcac__cred_vsphere_username | d('CHANGEME123') }}" +__docker_stack__jenkins_jcac__cred_vsphere_password: "{{ + docker_stack__jenkins_jcac__cred_vsphere_password | d('CHANGEME123') }}" + +__docker_stack__jenkins_jcac__cred_esxi_password: "{{ + docker_stack__jenkins_jcac__cred_esxi_password | d('CHANGEME123') }}" #__docker_stack__jenkins_jcac__cred_packer_user_username_default: packer __docker_stack__jenkins_jcac__cred_packer_user_username_default: osbuild __docker_stack__jenkins_jcac__cred_packer_user_username: "{{ docker_stack__jenkins_jcac__cred_packer_user_username | d(__docker_stack__jenkins_jcac__cred_packer_user_username_default) }}" -#__docker_stack__jenkins_jcac__cred_packer_ssh_password: "{{ docker_stack__jenkins_jcac__cred_packer_ssh_password | d('CHANGEME123') }}" -__docker_stack__jenkins_jcac__cred_packer_user_password: "{{ docker_stack__jenkins_jcac__cred_packer_user_password | d('CHANGEME123') }}" - -__docker_stack__jenkins_jcac__cred_vm_root_password: "{{ docker_stack__jenkins_jcac__cred_vm_root_password | d('CHANGEME123') }}" -__docker_stack__jenkins_jcac__cred_github_username: "{{ docker_stack__jenkins_jcac__cred_github_username | d('CHANGEME123') }}" -__docker_stack__jenkins_jcac__cred_github_password: "{{ docker_stack__jenkins_jcac__cred_github_password | d('CHANGEME123') }}" - -__docker_stack__jenkins_jcac__cred_bitbucket_cloud_oauth_key: "{{ docker_stack__jenkins_jcac__cred_bitbucket_cloud_oauth_key | d('CHANGEME123') }}" -__docker_stack__jenkins_jcac__cred_bitbucket_cloud_oauth_token: "{{ docker_stack__jenkins_jcac__cred_bitbucket_cloud_oauth_token | d('CHANGEME123') }}" +#__docker_stack__jenkins_jcac__cred_packer_ssh_password: "{{ +# docker_stack__jenkins_jcac__cred_packer_ssh_password | d('CHANGEME123') }}" +__docker_stack__jenkins_jcac__cred_packer_user_password: "{{ + docker_stack__jenkins_jcac__cred_packer_user_password | d('CHANGEME123') }}" + +__docker_stack__jenkins_jcac__cred_vm_root_password: "{{ + docker_stack__jenkins_jcac__cred_vm_root_password | d('CHANGEME123') }}" +__docker_stack__jenkins_jcac__cred_github_username: "{{ + docker_stack__jenkins_jcac__cred_github_username | d('CHANGEME123') }}" +__docker_stack__jenkins_jcac__cred_github_password: "{{ + docker_stack__jenkins_jcac__cred_github_password | d('CHANGEME123') }}" + +__docker_stack__jenkins_jcac__cred_bitbucket_cloud_oauth_key: 
"{{ + docker_stack__jenkins_jcac__cred_bitbucket_cloud_oauth_key | d('CHANGEME123') }}" +__docker_stack__jenkins_jcac__cred_bitbucket_cloud_oauth_token: "{{ + docker_stack__jenkins_jcac__cred_bitbucket_cloud_oauth_token | d('CHANGEME123') }}" __docker_stack__jenkins_jcac__secrets: - name: ldap_username diff --git a/roles/fetch-os-images/tasks/fetch-os-image.yml b/roles/fetch-os-images/tasks/fetch-os-image.yml index fb372c6ea..06c2c6eb7 100644 --- a/roles/fetch-os-images/tasks/fetch-os-image.yml +++ b/roles/fetch-os-images/tasks/fetch-os-image.yml @@ -38,7 +38,7 @@ ## ref: https://tldp.org/HOWTO/Debian-Jigdo/faq.html#WGETOPTIONS ## ref: https://askubuntu.com/questions/1226707/how-to-throttle-jigdo-lites-download-rate - name: "Get JIGDO image for {{ __fetch_iso_config.name }}" - command: "jigdo-lite --noask {{ __fetch_iso_config.iso_url }}" + ansible.builtin.command: "jigdo-lite --noask {{ __fetch_iso_config.iso_url }}" args: chdir: "{{ fetch_os_images__osimage_dir }}" creates: "{{ fetch_os_images__osimage_dir }}/{{ __fetch_iso_config.iso_file }}" diff --git a/roles/geerlingguy.git/tasks/install-from-source.yml b/roles/geerlingguy.git/tasks/install-from-source.yml index e4ebec790..279174645 100644 --- a/roles/geerlingguy.git/tasks/install-from-source.yml +++ b/roles/geerlingguy.git/tasks/install-from-source.yml @@ -24,7 +24,7 @@ state: present - name: Get installed version. - command: git --version + ansible.builtin.command: git --version changed_when: false failed_when: false check_mode: false @@ -53,7 +53,7 @@ when: (git_installed_version.rc != 0) or (git_reinstall_from_source | bool) - name: Build git. - command: > + ansible.builtin.command: > make prefix={{ git_install_path }} {{ item }} chdir={{ workspace }}/git-{{ git_version }} with_items: diff --git a/roles/ipa-sssd/tasks/main.yml b/roles/ipa-sssd/tasks/main.yml index f2d27bbd3..a5239c907 100644 --- a/roles/ipa-sssd/tasks/main.yml +++ b/roles/ipa-sssd/tasks/main.yml @@ -23,7 +23,7 @@ force: yes #- name: Enable and start sssd -# service: +# ansible.builtin.service: # name: sssd # state: restarted # enabled: yes diff --git a/roles/mergerfs/handlers/main.yml b/roles/mergerfs/handlers/main.yml index 38f2277cb..e6a3e6d5b 100644 --- a/roles/mergerfs/handlers/main.yml +++ b/roles/mergerfs/handlers/main.yml @@ -1,18 +1,18 @@ --- - name: systemd daemon-reload - command: systemctl daemon-reload + ansible.builtin.command: systemctl daemon-reload - name: start samba service - service: + ansible.builtin.service: name: smbd state: started - name: restart samba service - service: + ansible.builtin.service: name: smbd state: restarted - name: restart telegraf - service: + ansible.builtin.service: name: telegraf state: restarted \ No newline at end of file diff --git a/roles/mergerfs/tasks/config-mergerfs-disks.yml b/roles/mergerfs/tasks/config-mergerfs-disks.yml index b9720c7f8..36c302a0b 100644 --- a/roles/mergerfs/tasks/config-mergerfs-disks.yml +++ b/roles/mergerfs/tasks/config-mergerfs-disks.yml @@ -1,7 +1,7 @@ --- - name: create /mnt points - file: + ansible.builtin.file: dest: "{{ item.path }}" state: directory owner: nobody diff --git a/roles/mergerfs/tasks/file-sharing.yml b/roles/mergerfs/tasks/file-sharing.yml index 8f3ff236b..897448d00 100644 --- a/roles/mergerfs/tasks/file-sharing.yml +++ b/roles/mergerfs/tasks/file-sharing.yml @@ -1,7 +1,7 @@ --- - name: install samba - apt: + ansible.builtin.apt: pkg: samba state: present notify: diff --git a/roles/mergerfs/tasks/install-mergerfs-tools.yml 
index 3e564d1a0..068bc5028 100644
--- a/roles/mergerfs/tasks/install-mergerfs-tools.yml
+++ b/roles/mergerfs/tasks/install-mergerfs-tools.yml
@@ -3,13 +3,13 @@
 ## ref: https://github.com/xattr/xattr/blob/master/INSTALLING.txt
 ## ref:
 - name: "Install xattr"
-  pip:
+  ansible.builtin.pip:
     name: "xattr"
     state: present
 #    extra_args: --user

 - name: check to see if mergerfs-tools is already installed
-  command: "mergerfs.ctl info"
+  ansible.builtin.command: "mergerfs.ctl info"
   ignore_errors: true
   register: mergerfs_tools_is_installed
   changed_when: false
@@ -19,7 +19,7 @@
   block:
     - name: "Check for existing mergerfs-tools src dir"
-      stat:
+      ansible.builtin.stat:
         path: "{{ mergerfs_tools_src_dir }}"
       register: mergerfs_tools_path
 #      ignore_errors: true
@@ -27,7 +27,7 @@
     ## ref: https://github.com/trapexit/mergerfs-tools/issues/47#issuecomment-333294102
     - name: mergerfs-tools | clone git repo
       when: not mergerfs_tools_path.stat.exists
-      git:
+      ansible.builtin.git:
         repo: https://github.com/trapexit/mergerfs-tools
         dest: "{{ mergerfs_tools_src_dir }}"
         force: yes
diff --git a/roles/netbootxyz/netbootxyz-overrides.yml b/roles/netbootxyz/netbootxyz-overrides.yml
index 1e5b9c640..d0f0c07c0 100644
--- a/roles/netbootxyz/netbootxyz-overrides.yml
+++ b/roles/netbootxyz/netbootxyz-overrides.yml
@@ -16,4 +16,4 @@ ipxe_ca_url: http://ca.ipxe.org/ca.crt
 ipxe_ca_filename: ca-ipxe-org.crt
 codesign_cert_filename: codesign.crt
 codesign_key_filename: codesign.key
-cert_file_filename : ca-netboot-xyz.crt
+cert_file_filename: ca-netboot-xyz.crt
diff --git a/roles/netbootxyz/tasks/generate_checksums.yml b/roles/netbootxyz/tasks/generate_checksums.yml
index 87326f8aa..4b87cd875 100644
--- a/roles/netbootxyz/tasks/generate_checksums.yml
+++ b/roles/netbootxyz/tasks/generate_checksums.yml
@@ -1,18 +1,18 @@
 ---
 - name: Register a listing of all created iPXE bootloaders
-  command: ls -I {{ checksums_filename }} {{ netbootxyz_root }}/ipxe/
+  ansible.builtin.command: ls -I {{ checksums_filename }} {{ netbootxyz_root }}/ipxe/
   register: netboot_disks
   tags:
     - skip_ansible_lint

 - name: Generate date
-  command: date
+  ansible.builtin.command: date
   register: current_date
   tags:
     - skip_ansible_lint

 - name: Gather stat listing of directory
-  command: sha256sum -b {{ item }}
+  ansible.builtin.command: sha256sum -b {{ item }}
   with_items:
     - "{{ netboot_disks.stdout_lines }}"
   args:
diff --git a/roles/netbootxyz/tasks/generate_signatures.yml b/roles/netbootxyz/tasks/generate_signatures.yml
index d55b5f049..99204664c 100644
--- a/roles/netbootxyz/tasks/generate_signatures.yml
+++ b/roles/netbootxyz/tasks/generate_signatures.yml
@@ -1,6 +1,6 @@
 ---
 - name: Gather list of source files
-  command: ls {{ netbootxyz_root }}
+  ansible.builtin.command: ls {{ netbootxyz_root }}
   register: source_files
   tags:
     - skip_ansible_lint
diff --git a/roles/pip/tasks/main.yml b/roles/pip/tasks/main.yml
index 0e842cbf2..a41c0557d 100644
--- a/roles/pip/tasks/main.yml
+++ b/roles/pip/tasks/main.yml
@@ -7,13 +7,13 @@
     - "default.yml"

 - name: Install Virtualenv
-  pip:
+  ansible.builtin.pip:
     name: "virtualenv"
     extra_args: --user

 - name: Install common pip libs
   when: (required_pip_libs is defined and not(required_pip_libs is none))
-  pip:
+  ansible.builtin.pip:
     name: "{{ required_pip_libs }}"
     state: present
     extra_args: --user
diff --git a/roles/rsyncd_conf/tasks/execute_sync.yml b/roles/rsyncd_conf/tasks/execute_sync.yml
index 295be31e9..78d5cbefc 100644
--- a/roles/rsyncd_conf/tasks/execute_sync.yml
+++ b/roles/rsyncd_conf/tasks/execute_sync.yml
@@ -32,7 +32,7 @@
 - name: Synchronize Files from {{ source_host }} to {{ remote_host }}
   block:
     - name: Execute File Sync # noqa command-instead-of-module
-      command:
+      ansible.builtin.command:
         cmd: |
           rsync -av
           --delete-before
           --rsync-path='sudo rsync'
diff --git a/roles/rsyncd_conf/tasks/main.yml b/roles/rsyncd_conf/tasks/main.yml
index 29c82ffbf..73ceddd5f 100644
--- a/roles/rsyncd_conf/tasks/main.yml
+++ b/roles/rsyncd_conf/tasks/main.yml
@@ -16,7 +16,7 @@
   with_items: "{{ rsync_out.results }}"

 - name: "Backup the rsyncd.conf"
-#  command: "cp /etc/rsyncd.conf /etc/rsyncd.conf.bkup"
+#  ansible.builtin.command: "cp /etc/rsyncd.conf /etc/rsyncd.conf.bkup"
   ansible.builtin.copy:
     remote_src: yes
     src: "/etc/rsyncd.conf"
@@ -49,12 +49,12 @@
   delegate_to: "{{ remote_host }}"

 - name: "Run rsync commnad"
-  command: "rsync -avz --delete-before {{ local_filesystem_path }} {{ remote_host }}::files"
+  ansible.builtin.command: "rsync -avz --delete-before {{ local_filesystem_path }} {{ remote_host }}::files"
   become: yes
   delegate_to: "{{ source_host }}"

 - name: "Restore the backup of the rsyncd.conf"
-#  command: "mv /etc/rsyncd.conf.bkup /etc/rsyncd.conf"
+#  ansible.builtin.command: "mv /etc/rsyncd.conf.bkup /etc/rsyncd.conf"
   ansible.builtin.copy:
     remote_src: yes
     src: "/etc/rsyncd.conf.bkup"
diff --git a/roles/rsyncd_conf/tests/diff-source-and-remote.yml b/roles/rsyncd_conf/tests/diff-source-and-remote.yml
index 0c534be53..54b7507c9 100644
--- a/roles/rsyncd_conf/tests/diff-source-and-remote.yml
+++ b/roles/rsyncd_conf/tests/diff-source-and-remote.yml
@@ -4,7 +4,7 @@
   delegate_to: "{{ source_host }}"
   block:
-    - command: find . -type f
+    - ansible.builtin.command: find . -type f
      args:
        chdir: "{{ local_filesystem_path }}"
      register: source_shell_result
@@ -17,7 +17,7 @@
   delegate_to: "{{ remote_host }}"
   block:
-    - command: find . -type f
+    - ansible.builtin.command: find . -type f
      args:
        chdir: "{{ remote_filesystem_path }}"
      register: remote_shell_result
diff --git a/roles/rsyncd_conf/tests/main.yml b/roles/rsyncd_conf/tests/main.yml
index 8c397b9a4..270ecfa36 100644
--- a/roles/rsyncd_conf/tests/main.yml
+++ b/roles/rsyncd_conf/tests/main.yml
@@ -30,7 +30,7 @@
   with_items: "{{ rsync_out.results }}"

 #- name: backup the rsyncd.conf
-#  command: "cp /etc/rsyncd.conf /etc/rsyncd.conf.bkup"
+#  ansible.builtin.command: "cp /etc/rsyncd.conf /etc/rsyncd.conf.bkup"
 #  when: item.status == false
 #  delegate_to: "{{ item.ip }}"
 #  ignore_errors: yes
@@ -62,7 +62,7 @@
   delegate_to: "{{ remote_host }}"

 - name: run rsync commnad
-#  command: "rsync -avz --delete-before {{ local_filesystem_path }} {{ remote_host }}::files"
+#  ansible.builtin.command: "rsync -avz --delete-before {{ local_filesystem_path }} {{ remote_host }}::files"
   ansible.builtin.shell: >
     rsync -arP
     --rsync-path 'sudo -u root rsync'
@@ -78,7 +78,7 @@
   ansible.builtin.include_tasks: diff-source-and-remote.yml

 #- name: restore the backup of the rsyncd.conf
-#  command: "mv /etc/rsyncd.conf.bkup /etc/rsyncd.conf"
+#  ansible.builtin.command: "mv /etc/rsyncd.conf.bkup /etc/rsyncd.conf"
 #  when: item.status == false
 #  delegate_to: "{{ item.ip }}"
 #  loop: "{{ rsync_dict }}"
diff --git a/roles/samba-client/handlers/main.yml b/roles/samba-client/handlers/main.yml
index acd785d82..230a1de48 100644
--- a/roles/samba-client/handlers/main.yml
+++ b/roles/samba-client/handlers/main.yml
@@ -1,7 +1,7 @@
 ---
 - name: reload firewalld
-  command: firewall-cmd --reload
+  ansible.builtin.command: firewall-cmd --reload

 - name: restart firewalld
   service:
diff --git a/roles/solrcloud/tasks/collections.yml b/roles/solrcloud/tasks/collections.yml
index 5508f4b68..c8419e90e 100644
--- a/roles/solrcloud/tasks/collections.yml
+++ b/roles/solrcloud/tasks/collections.yml
@@ -34,7 +34,7 @@
   when: solr_collections_transfer_mode == "copy"

 - name: SolrCloud | Upload initial config
-  command: >
+  ansible.builtin.command: >
     {{ solr_zookeeper_client_path }}/zkcli.sh
     -zkhost {{ solr_zookeeper_hosts }}/{{ solr_zookeeper_hosts_solr_path }}
     -cmd upconfig
diff --git a/roles/solrcloud/tasks/config.yml b/roles/solrcloud/tasks/config.yml
index ef06a66fd..7528ac7f6 100644
--- a/roles/solrcloud/tasks/config.yml
+++ b/roles/solrcloud/tasks/config.yml
@@ -6,7 +6,7 @@
     mode: "0744"

 - name: SolrCloud | Check Root Path (znode) in ZooKeeper
-  command: >
+  ansible.builtin.command: >
     {{ solr_zookeeper_client_path }}/zkcli.sh
     -zkhost {{ solr_zookeeper_hosts }}
     -cmd list /{{ solr_zookeeper_hosts_solr_path }}
@@ -14,7 +14,7 @@
   changed_when: false

 - name: SolrCloud | Create Root Path (znode) in ZooKeeper using zkcli script
-  command: >
+  ansible.builtin.command: >
     {{ solr_zookeeper_client_path }}/zkcli.sh
     -zkhost {{ solr_zookeeper_hosts }}
     -cmd makepath /{{ solr_zookeeper_hosts_solr_path }}
@@ -79,7 +79,7 @@
   when: solr_external_libraries is defined and solr_external_libraries|length > 0

 - name: SolrCloud | Force handlers
-  meta: flush_handlers
+  ansible.builtin.meta: flush_handlers

 - name: SolrCloud | Wait for SolrCloud to fully startup before continue
   uri:
diff --git a/roles/solrcloud/tasks/install.yml b/roles/solrcloud/tasks/install.yml
index 297396d31..a87729ae4 100644
--- a/roles/solrcloud/tasks/install.yml
+++ b/roles/solrcloud/tasks/install.yml
@@ -33,7 +33,7 @@
     - "{{ solr_collections_config_tmp_dir }}"

 - name: SolrCloud | Check if service is running
-  command: systemctl status solr.service
+  ansible.builtin.command: systemctl status solr.service
   ignore_errors: true
   changed_when: false
   register: service_solrcloud_status
diff --git a/roles/zlib/tasks/main.yml b/roles/zlib/tasks/main.yml
index 6b99b1713..bcc217b2c 100644
--- a/roles/zlib/tasks/main.yml
+++ b/roles/zlib/tasks/main.yml
@@ -2,14 +2,14 @@
 ## ref: https://osric.com/chris/accidental-developer/2018/03/using-ansible-to-check-version-before-install-or-upgrade/
 - name: "Check for existing zlib install"
   tags: libz
-  stat:
+  ansible.builtin.stat:
     path: "{{ path_to_libz }}"
   register: libz_path
 #  ignore_errors: true

 - name: "Check for existing zlib tmp dir"
   tags: libz
-  stat:
+  ansible.builtin.stat:
     path: "{{ zlib_dir }}"
   register: libz_src_dir_path
 #  ignore_errors: true
@@ -29,7 +29,7 @@
 #  when: "zlib_version.stdout.find ('\"{{ libz_target_version }}\"') == -1"
   when: "(not libz_path.stat.exists or libz_target_version not in libz_version.stdout) and not libz_src_dir_path.stat.exists"
   tags: libz
-  unarchive:
+  ansible.builtin.unarchive:
     src: "{{ zlib_url }}"
     dest: "{{ zlib_tmp }}"
     remote_src: True
diff --git a/roles/zookeeper/dockerhub/deploy.yml b/roles/zookeeper/dockerhub/deploy.yml
index 14f85fb22..c0368bb73 100644
--- a/roles/zookeeper/dockerhub/deploy.yml
+++ b/roles/zookeeper/dockerhub/deploy.yml
@@ -17,7 +17,7 @@
         privileged: true

     - name: Add container to in-memory inventory
-      add_host:
+      ansible.builtin.add_host:
        name: "zookeeper"
        ansible_connection: docker
@@ -34,7 +34,7 @@
   gather_facts: false
   tasks:
     - name: Commit container file changes to new image
-      command: docker commit \
+      ansible.builtin.command: docker commit \
               zookeeper idealista/zookeeper

     - name: Log into Docker Hub
@@ -44,7 +44,7 @@
         password: "{{ docker_hub_password }}"

     - name: Tag and push to Docker Hub
-      command: docker push idealista/zookeeper:latest
+      ansible.builtin.command: docker push idealista/zookeeper:latest

     - name: Remove the container
       docker_container:
diff --git a/roles/zookeeper/handlers/main.yml b/roles/zookeeper/handlers/main.yml
index b21d8deec..c5f7f11d2 100644
--- a/roles/zookeeper/handlers/main.yml
+++ b/roles/zookeeper/handlers/main.yml
@@ -1,7 +1,7 @@
 ---
 - name: Restart zookeeper
-  systemd:
+  ansible.builtin.systemd:
     name: zookeeper
     state: restarted
   when: zookeeper_service_state != 'stopped'