From a8ba8e44e5dc7146ab22aa424c67f46a23299af1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Matej=20=C5=A0tajduhar?= <30931414+matej5@users.noreply.github.com> Date: Thu, 4 Sep 2025 16:49:48 +0200 Subject: [PATCH 01/26] Fixing-email-title-for-backup-validation (#2657) Co-authored-by: Matej Stajduhar --- .../aws/aws_backup_validation/templates/validation_report.py.j2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/roles/aws/aws_backup_validation/templates/validation_report.py.j2 b/roles/aws/aws_backup_validation/templates/validation_report.py.j2 index bc80f32a7..7501db7ca 100644 --- a/roles/aws/aws_backup_validation/templates/validation_report.py.j2 +++ b/roles/aws/aws_backup_validation/templates/validation_report.py.j2 @@ -126,7 +126,7 @@ failed_job = backup_cli.list_restore_jobs( }, 'Subject': { 'Charset': 'UTF-8', - 'Data': 'Restore testing - {{ _aws_profile }}: ' + mail_title, + 'Data': 'Restore testing - {{ _infra_name }}: ' + mail_title, }, }, Source='Lambda Backup Validation ', From 057138d196ffc97cbdf7a821362cea2d2d0a74c8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Matej=20=C5=A0tajduhar?= <30931414+matej5@users.noreply.github.com> Date: Mon, 8 Sep 2025 13:56:58 +0200 Subject: [PATCH 02/26] Adding-task-to-create-aurora-cluster (#2659) * Adding-task-to-create-aurora-cluster * Adding-region-profile-and-tags-to-aurora-cluster * Updating-engine-for-aurora-cluster * Updating-parameter-group-engine * Updating-engine-version * Updating-engine-version-2 * Disabling-automated-backups * Disabling-automated-backups-2 * Disabling-automated-backups-3 * Disabling-automated-backups-4 * Skipping-task-if-not-aurora * Adding-subnet-group-to-instances * Adding-subnet-group-to-instances * Updating-SG-return-values * Updating-SG-return-values-2 * Updating-SG-return-values-3 * Updating-SG-return-values-4 * Updating-SG-return-value-debug * Updating-SG-return-value-debug-2 * Updating-SG-return-value-debug-3 * Removing-debug-tasks * Removing-init-var-for-SG-list * 
Adding-character-set-option --------- Co-authored-by: Matej Stajduhar --- roles/aws/aws_rds/tasks/main.yml | 41 +++++++++++++++++++++++--------- 1 file changed, 30 insertions(+), 11 deletions(-) diff --git a/roles/aws/aws_rds/tasks/main.yml b/roles/aws/aws_rds/tasks/main.yml index 977e9959c..28aff345a 100644 --- a/roles/aws/aws_rds/tasks/main.yml +++ b/roles/aws/aws_rds/tasks/main.yml @@ -23,12 +23,40 @@ - aws_rds.db_parameters is defined - aws_rds.db_parameters | length > 0 +- name: Generate security group information. + ansible.builtin.include_role: + name: aws/aws_security_groups + vars: + aws_security_groups: + profile: "{{ aws_rds.aws_profile }}" + region: "{{ aws_rds.region }}" + group_names: "{{ aws_rds.security_groups }}" + return_type: ids + when: aws_rds.security_groups | length > 0 + +- name: Create Aurora cluster. + amazon.aws.rds_cluster: + profile: "{{ aws_rds.aws_profile }}" + region: "{{ aws_rds.region }}" + cluster_id: "{{ aws_rds.name }}" + engine: "{{ aws_rds.engine }}" + engine_version: "{{ aws_rds.engine_version }}" + username: "{{ aws_rds.master_username }}" + password: "{{ aws_rds.master_user_password }}" + db_subnet_group_name: "{{ aws_rds.name }}" + vpc_security_group_ids: "{{ _aws_security_group_list }}" + backup_retention_period: "{{ aws_rds.backup_retention_period | default(35) }}" + character_set_name: "{{ aws_rds.character_set_name | default(omit) }}" + tags: "{{ aws_rds.tags | combine({'Name': aws_rds.name}) }}" + when: "'aurora' in aws_rds.engine" + - name: Create Aurora RDS instance. 
amazon.aws.rds_instance: db_instance_identifier: "{{ aws_rds.name }}-{{ aws_rds.aurora_suffix }}" db_instance_class: "{{ aws_rds.db_instance_class }}" db_cluster_identifier: "{{ aws_rds.db_cluster_identifier | default(aws_rds.name) }}" db_parameter_group_name: "{{ aws_rds.db_parameter_group_name | default(omit) }}" + db_subnet_group_name: "{{ aws_rds.name }}" state: "{{ aws_rds.state }}" engine: "{{ aws_rds.engine }}" copy_tags_to_snapshot: true @@ -49,6 +77,7 @@ db_cluster_identifier: "{{ aws_rds.db_cluster_identifier | default(aws_rds.name) }}" db_instance_class: "{{ aws_rds.db_instance_class }}" db_parameter_group_name: "{{ aws_rds.db_parameter_group_name | default(omit) }}" + db_subnet_group_name: "{{ aws_rds.name }}" state: "{{ aws_rds.state }}" engine: "{{ aws_rds.engine }}" copy_tags_to_snapshot: true @@ -64,17 +93,6 @@ - "'aurora' in aws_rds.engine" - aws_rds.aurora_reader -- name: Generate security group information. - ansible.builtin.include_role: - name: aws/aws_security_groups - vars: - aws_security_groups: - profile: "{{ aws_rds.aws_profile }}" - region: "{{ aws_rds.region }}" - group_names: "{{ aws_rds.security_groups }}" - return_type: ids - when: aws_rds.security_groups | length > 0 - - name: Create RDS instance. amazon.aws.rds_instance: profile: "{{ aws_rds.aws_profile }}" @@ -214,3 +232,4 @@ when: - aws_rds.backup is defined - aws_rds.backup | length > 0 + - "'aurora' not in aws_rds.engine" From 17115af44e2d926c84e1e8a0c797620380ee91af Mon Sep 17 00:00:00 2001 From: Greg Harvey Date: Wed, 10 Sep 2025 10:55:22 +0200 Subject: [PATCH 03/26] Fixing installer variable bug. 
--- install.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/install.sh b/install.sh index b74950a87..c8c8b8db4 100755 --- a/install.sh +++ b/install.sh @@ -210,7 +210,7 @@ ce_provision: venv_install_username: ${CONTROLLER_USER} upgrade_timer_name: upgrade_ce_provision_ansible aws_support: ${AWS_SUPPORT} - new_user: ${CONTROLLER_USER} + new_user: true username: ${CONTROLLER_USER} ssh_key_bits: "521" ssh_key_type: ed25519 From 176df44f01cc27a353dc28eae8d5f4da2521421f Mon Sep 17 00:00:00 2001 From: Greg Harvey Date: Wed, 10 Sep 2025 11:11:21 +0200 Subject: [PATCH 04/26] Fixing tests for external PRs. --- .github/workflows/ce-provision-test-gitlab.yml | 2 +- .github/workflows/ce-provision-test-web.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ce-provision-test-gitlab.yml b/.github/workflows/ce-provision-test-gitlab.yml index 9e5b46a30..566ea1377 100644 --- a/.github/workflows/ce-provision-test-gitlab.yml +++ b/.github/workflows/ce-provision-test-gitlab.yml @@ -23,7 +23,7 @@ jobs: steps: - name: Install ce-provision run: | - /usr/bin/curl -LO https://raw.githubusercontent.com/codeenigma/ce-provision/${{ github.event.pull_request.head.ref }}/install.sh + /usr/bin/curl -LO https://raw.githubusercontent.com/${{ github.event.pull_request.head.repo.owner.login }}/ce-provision/${{ github.event.pull_request.head.ref }}/install.sh /usr/bin/chmod +x ./install.sh /usr/bin/sudo ./install.sh --version ${{ github.event.pull_request.head.ref }} --config-branch ${{ github.event.pull_request.base.ref }} --docker --no-firewall diff --git a/.github/workflows/ce-provision-test-web.yml b/.github/workflows/ce-provision-test-web.yml index 595905064..c8ae20bbe 100644 --- a/.github/workflows/ce-provision-test-web.yml +++ b/.github/workflows/ce-provision-test-web.yml @@ -23,7 +23,7 @@ jobs: steps: - name: Install ce-provision run: | - /usr/bin/curl -LO https://raw.githubusercontent.com/codeenigma/ce-provision/${{ 
github.event.pull_request.head.ref }}/install.sh + /usr/bin/curl -LO https://raw.githubusercontent.com/${{ github.event.pull_request.head.repo.owner.login }}/ce-provision/${{ github.event.pull_request.head.ref }}/install.sh /usr/bin/chmod +x ./install.sh /usr/bin/sudo ./install.sh --version ${{ github.event.pull_request.head.ref }} --config-branch ${{ github.event.pull_request.base.ref }} --docker --no-firewall From 24845320411170dc5ac23d90c05837de38183dbe Mon Sep 17 00:00:00 2001 From: Greg Harvey Date: Wed, 10 Sep 2025 11:15:22 +0200 Subject: [PATCH 05/26] Testing with a fork. --- .github/workflows/ce-provision-test-gitlab.yml | 2 +- .github/workflows/ce-provision-test-web.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ce-provision-test-gitlab.yml b/.github/workflows/ce-provision-test-gitlab.yml index 566ea1377..9d6670617 100644 --- a/.github/workflows/ce-provision-test-gitlab.yml +++ b/.github/workflows/ce-provision-test-gitlab.yml @@ -23,7 +23,7 @@ jobs: steps: - name: Install ce-provision run: | - /usr/bin/curl -LO https://raw.githubusercontent.com/${{ github.event.pull_request.head.repo.owner.login }}/ce-provision/${{ github.event.pull_request.head.ref }}/install.sh + /usr/bin/curl -LO https://raw.githubusercontent.com/${{ github.event.pull_request.head.repo.name }}/${{ github.event.pull_request.head.ref }}/install.sh /usr/bin/chmod +x ./install.sh /usr/bin/sudo ./install.sh --version ${{ github.event.pull_request.head.ref }} --config-branch ${{ github.event.pull_request.base.ref }} --docker --no-firewall diff --git a/.github/workflows/ce-provision-test-web.yml b/.github/workflows/ce-provision-test-web.yml index c8ae20bbe..509d0e2d3 100644 --- a/.github/workflows/ce-provision-test-web.yml +++ b/.github/workflows/ce-provision-test-web.yml @@ -23,7 +23,7 @@ jobs: steps: - name: Install ce-provision run: | - /usr/bin/curl -LO https://raw.githubusercontent.com/${{ github.event.pull_request.head.repo.owner.login 
}}/ce-provision/${{ github.event.pull_request.head.ref }}/install.sh + /usr/bin/curl -LO https://raw.githubusercontent.com/${{ github.event.pull_request.head.repo.name }}/${{ github.event.pull_request.head.ref }}/install.sh /usr/bin/chmod +x ./install.sh /usr/bin/sudo ./install.sh --version ${{ github.event.pull_request.head.ref }} --config-branch ${{ github.event.pull_request.base.ref }} --docker --no-firewall From cde9a6037638f76b2e2615146fb2550eaaf4d820 Mon Sep 17 00:00:00 2001 From: Greg Harvey Date: Wed, 10 Sep 2025 11:18:41 +0200 Subject: [PATCH 06/26] Adding repo owner's username into installer string. --- .github/workflows/ce-provision-test-gitlab.yml | 2 +- .github/workflows/ce-provision-test-web.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ce-provision-test-gitlab.yml b/.github/workflows/ce-provision-test-gitlab.yml index 9d6670617..6da7b5a4b 100644 --- a/.github/workflows/ce-provision-test-gitlab.yml +++ b/.github/workflows/ce-provision-test-gitlab.yml @@ -23,7 +23,7 @@ jobs: steps: - name: Install ce-provision run: | - /usr/bin/curl -LO https://raw.githubusercontent.com/${{ github.event.pull_request.head.repo.name }}/${{ github.event.pull_request.head.ref }}/install.sh + /usr/bin/curl -LO https://raw.githubusercontent.com/${{ github.event.pull_request.head.repo.owner.login }}/${{ github.event.pull_request.head.repo.name }}/${{ github.event.pull_request.head.ref }}/install.sh /usr/bin/chmod +x ./install.sh /usr/bin/sudo ./install.sh --version ${{ github.event.pull_request.head.ref }} --config-branch ${{ github.event.pull_request.base.ref }} --docker --no-firewall diff --git a/.github/workflows/ce-provision-test-web.yml b/.github/workflows/ce-provision-test-web.yml index 509d0e2d3..e95bf6337 100644 --- a/.github/workflows/ce-provision-test-web.yml +++ b/.github/workflows/ce-provision-test-web.yml @@ -23,7 +23,7 @@ jobs: steps: - name: Install ce-provision run: | - /usr/bin/curl -LO 
https://raw.githubusercontent.com/${{ github.event.pull_request.head.repo.name }}/${{ github.event.pull_request.head.ref }}/install.sh + /usr/bin/curl -LO https://raw.githubusercontent.com/${{ github.event.pull_request.head.repo.owner.login }}/${{ github.event.pull_request.head.repo.name }}/${{ github.event.pull_request.head.ref }}/install.sh /usr/bin/chmod +x ./install.sh /usr/bin/sudo ./install.sh --version ${{ github.event.pull_request.head.ref }} --config-branch ${{ github.event.pull_request.base.ref }} --docker --no-firewall From a2c4bac692aa51a4eee16f268988fc7cdfe42a4c Mon Sep 17 00:00:00 2001 From: Greg Harvey Date: Wed, 10 Sep 2025 11:35:23 +0200 Subject: [PATCH 07/26] Refactoring config repo detection to simplify. --- roles/debian/ce_provision/tasks/main.yml | 13 ++++--------- 1 file changed, 4 insertions(+), 9 deletions(-) diff --git a/roles/debian/ce_provision/tasks/main.yml b/roles/debian/ce_provision/tasks/main.yml index 792eea3d6..0ccd6e680 100644 --- a/roles/debian/ce_provision/tasks/main.yml +++ b/roles/debian/ce_provision/tasks/main.yml @@ -57,11 +57,6 @@ filename: "{{ ce_provision.username }}" when: _ce_provision_username != ce_provision.username -# This prevent the original var to be re-evaluated when we move things around. -- name: Register config repository. - ansible.builtin.set_fact: - ce_provision_has_config_repo: "{{ 'yes' if ce_provision.config_repository else 'no' }}" - - name: Ensure APT dependencies are installed. ansible.builtin.apt: pkg: ["git", "parallel"] @@ -102,7 +97,7 @@ become: true become_user: "{{ ce_provision.username }}" when: - - ce_provision_has_config_repo + - ce_provision.config_repository | length > 0 - not ce_provision.config_repository_skip_checkout - name: Create defaults folders. @@ -111,13 +106,13 @@ state: directory with_items: - hosts - when: not ce_provision_has_config_repo + when: not ce_provision.config_repository | length > 0 - name: Create default config. 
ansible.builtin.copy: src: ansible.cfg dest: "{{ ce_provision.local_dir }}/ansible.cfg" - when: not ce_provision_has_config_repo + when: not ce_provision.config_repository | length > 0 - name: Symlink config folders to /etc/ansible. ansible.builtin.file: @@ -129,7 +124,7 @@ - files - templates - ansible.cfg - when: ce_provision_has_config_repo + when: ce_provision.config_repository | length > 0 - name: Create data dir. ansible.builtin.file: From 7ce204b56cfe7039f4a462fa97be46a824cd7fad Mon Sep 17 00:00:00 2001 From: Greg Harvey Date: Wed, 10 Sep 2025 11:48:58 +0200 Subject: [PATCH 08/26] No longer permitted to use an integer as a truthy value. --- roles/debian/user_ansible/tasks/main.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/roles/debian/user_ansible/tasks/main.yml b/roles/debian/user_ansible/tasks/main.yml index 93290f410..5f138f26a 100644 --- a/roles/debian/user_ansible/tasks/main.yml +++ b/roles/debian/user_ansible/tasks/main.yml @@ -13,7 +13,7 @@ with_items: "{{ user_ansible.groups }}" loop_control: loop_var: group - when: user_ansible.groups | length + when: user_ansible.groups | length > 0 - name: Create the system user. ansible.builtin.user: @@ -74,7 +74,7 @@ owner: "{{ user_ansible.username }}" group: "{{ user_ansible.username }}" mode: '0600' - when: user_ansible.known_hosts | length + when: user_ansible.known_hosts | length > 0 - name: Add public keys to known_hosts. ansible.builtin.known_hosts: From c7ae00387857c07e59e8ef8a9e74d0c3a1dee172 Mon Sep 17 00:00:00 2001 From: Greg Harvey Date: Wed, 10 Sep 2025 11:56:53 +0200 Subject: [PATCH 09/26] No longer permitted to use existence check as a truthy value. --- roles/_init/tasks/main.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/roles/_init/tasks/main.yml b/roles/_init/tasks/main.yml index c401fefb3..82668f566 100644 --- a/roles/_init/tasks/main.yml +++ b/roles/_init/tasks/main.yml @@ -121,9 +121,9 @@ - name: Load custom vars file. 
ansible.builtin.include_tasks: allowed_vars.yml when: - - _init.ce_provision_extra_repository - - _init.ce_provision_extra_repository_vars_file - - _init.ce_provision_extra_repository_allowed_vars + - _init.ce_provision_extra_repository | length > 0 + - _init.ce_provision_extra_repository_vars_file | length > 0 + - _init.ce_provision_extra_repository_allowed_vars | length > 0 # Install Ansible under the controller user for all servers # Ensure ansible_connection == 'ssh' (i.e. we are connecting to a server) before executing From 6379b2e39df24121df90b8a6fae8e7a1faf9f22e Mon Sep 17 00:00:00 2001 From: Greg Harvey Date: Wed, 10 Sep 2025 12:27:22 +0200 Subject: [PATCH 10/26] Can't see a reason why linotp var shouldn't be a boolean. --- roles/debian/apt_unattended_upgrades/defaults/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/roles/debian/apt_unattended_upgrades/defaults/main.yml b/roles/debian/apt_unattended_upgrades/defaults/main.yml index 855c7f924..a63f3e140 100644 --- a/roles/debian/apt_unattended_upgrades/defaults/main.yml +++ b/roles/debian/apt_unattended_upgrades/defaults/main.yml @@ -4,7 +4,7 @@ _apt_unattended_upgrades_default_origins: - "origin=Debian,codename=${distro_codename}-security,label=Debian-Security" apt_unattended_upgrades: enable: true - linotp: "false" + linotp: false # unattended-upgrades template vars. # booleans must be strings to avoid Jinja2 interpretting. origins: "{{ _apt_unattended_upgrades_default_origins }}" From 318f532d6e145c044d901f90b3f3b5572df22de5 Mon Sep 17 00:00:00 2001 From: Greg Harvey Date: Wed, 10 Sep 2025 12:38:29 +0200 Subject: [PATCH 11/26] No longer permitted to use existence check as a truthy value. 
--- roles/_exit/tasks/main.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/roles/_exit/tasks/main.yml b/roles/_exit/tasks/main.yml index 51d676278..b9dce908d 100644 --- a/roles/_exit/tasks/main.yml +++ b/roles/_exit/tasks/main.yml @@ -3,8 +3,8 @@ - name: Generate/Update custom vars file. ansible.builtin.include_tasks: allowed_vars.yml when: - - _init.ce_provision_extra_repository - - _init.ce_provision_extra_repository_vars_file + - _init.ce_provision_extra_repository | length > 0 + - _init.ce_provision_extra_repository_vars_file | length > 0 - _init.ce_provision_extra_repository_push - name: Store current playbook md5. From 1466d24f87123b195af6e77b2f4ea755a9e68704 Mon Sep 17 00:00:00 2001 From: Greg Harvey Date: Wed, 10 Sep 2025 12:47:27 +0200 Subject: [PATCH 12/26] Fixing truthy errors in ce_deploy role. --- roles/debian/ce_deploy/tasks/main.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/roles/debian/ce_deploy/tasks/main.yml b/roles/debian/ce_deploy/tasks/main.yml index 15f2265dd..526d2bf86 100644 --- a/roles/debian/ce_deploy/tasks/main.yml +++ b/roles/debian/ce_deploy/tasks/main.yml @@ -62,7 +62,7 @@ version: "{{ ce_deploy.config_repository_branch | default('main') }}" become: false delegate_to: localhost - when: ce_deploy.config_repository is defined and ce_deploy.config_repository + when: ce_deploy.config_repository is defined and ce_deploy.config_repository | length > 0 - name: Synchronize config directory. ansible.posix.synchronize: @@ -71,7 +71,7 @@ delete: true rsync_opts: - "--chown={{ ce_deploy.username }}:{{ ce_deploy.username }}" - when: ce_deploy.config_repository is defined and ce_deploy.config_repository + when: ce_deploy.config_repository is defined and ce_deploy.config_repository | length > 0 - name: Check if we have a config directory. ansible.builtin.stat: @@ -81,7 +81,7 @@ - name: Register config repository. 
ansible.builtin.set_fact: key_value: ce_deploy_has_config_repo - ce_deploy_has_config_repo: "{{ 'yes' if ce_deploy_config_repo.stat.isdir is defined and ce_deploy_config_repo.stat.isdir else 'no' }}" + ce_deploy_has_config_repo: "{{ true if ce_deploy_config_repo.stat.isdir is defined and ce_deploy_config_repo.stat.isdir else false }}" - name: Create defaults folders. ansible.builtin.file: From 3c14dfa005a2ece232f3ea65574d8666e93d2fef Mon Sep 17 00:00:00 2001 From: Greg Harvey Date: Wed, 10 Sep 2025 12:58:45 +0200 Subject: [PATCH 13/26] No longer permitted to use an integer as a truthy value. --- roles/debian/ssh_server/tasks/main.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/roles/debian/ssh_server/tasks/main.yml b/roles/debian/ssh_server/tasks/main.yml index 8d52d8eee..47c07ed41 100644 --- a/roles/debian/ssh_server/tasks/main.yml +++ b/roles/debian/ssh_server/tasks/main.yml @@ -18,7 +18,7 @@ with_items: "{{ sshd.groups }}" loop_control: loop_var: group - when: sshd.groups | length + when: sshd.groups | length > 0 - name: Generate group section of the sshd_config file. ansible.builtin.blockinfile: @@ -29,7 +29,7 @@ with_items: "{{ sshd.groups }}" loop_control: loop_var: group - when: sshd.groups | length + when: sshd.groups | length > 0 - name: Generate user section of the sshd_config file. ansible.builtin.blockinfile: @@ -40,7 +40,7 @@ with_items: "{{ sshd.users }}" loop_control: loop_var: users - when: sshd.users | length + when: sshd.users | length > 0 # - name: Trigger overrides # include_role: From 2b30a7848829b2eae2e82c9871888e97741520ea Mon Sep 17 00:00:00 2001 From: Greg Harvey Date: Wed, 10 Sep 2025 13:23:01 +0200 Subject: [PATCH 14/26] Fixing truthy variable mistakes. (#2662) * Fixing installer variable bug. * Fixing tests for external PRs. * Testing with a fork. * Adding repo owner's username into installer string. * Refactoring config repo detection to simplify. * No longer permitted to use an integer as a truthy value. 
* No longer permitted to use existence check as a truthy value. * Can't see a reason why linotp var shouldn't be a boolean. * No longer permitted to use existence check as a truthy value. * Fixing truthy errors in ce_deploy role. * No longer permitted to use an integer as a truthy value. --- .github/workflows/ce-provision-test-gitlab.yml | 2 +- .github/workflows/ce-provision-test-web.yml | 2 +- install.sh | 2 +- roles/_exit/tasks/main.yml | 4 ++-- roles/_init/tasks/main.yml | 6 +++--- .../apt_unattended_upgrades/defaults/main.yml | 2 +- roles/debian/ce_deploy/tasks/main.yml | 6 +++--- roles/debian/ce_provision/tasks/main.yml | 13 ++++--------- roles/debian/ssh_server/tasks/main.yml | 6 +++--- roles/debian/user_ansible/tasks/main.yml | 4 ++-- 10 files changed, 21 insertions(+), 26 deletions(-) diff --git a/.github/workflows/ce-provision-test-gitlab.yml b/.github/workflows/ce-provision-test-gitlab.yml index 9e5b46a30..6da7b5a4b 100644 --- a/.github/workflows/ce-provision-test-gitlab.yml +++ b/.github/workflows/ce-provision-test-gitlab.yml @@ -23,7 +23,7 @@ jobs: steps: - name: Install ce-provision run: | - /usr/bin/curl -LO https://raw.githubusercontent.com/codeenigma/ce-provision/${{ github.event.pull_request.head.ref }}/install.sh + /usr/bin/curl -LO https://raw.githubusercontent.com/${{ github.event.pull_request.head.repo.owner.login }}/${{ github.event.pull_request.head.repo.name }}/${{ github.event.pull_request.head.ref }}/install.sh /usr/bin/chmod +x ./install.sh /usr/bin/sudo ./install.sh --version ${{ github.event.pull_request.head.ref }} --config-branch ${{ github.event.pull_request.base.ref }} --docker --no-firewall diff --git a/.github/workflows/ce-provision-test-web.yml b/.github/workflows/ce-provision-test-web.yml index 595905064..e95bf6337 100644 --- a/.github/workflows/ce-provision-test-web.yml +++ b/.github/workflows/ce-provision-test-web.yml @@ -23,7 +23,7 @@ jobs: steps: - name: Install ce-provision run: | - /usr/bin/curl -LO 
https://raw.githubusercontent.com/codeenigma/ce-provision/${{ github.event.pull_request.head.ref }}/install.sh + /usr/bin/curl -LO https://raw.githubusercontent.com/${{ github.event.pull_request.head.repo.owner.login }}/${{ github.event.pull_request.head.repo.name }}/${{ github.event.pull_request.head.ref }}/install.sh /usr/bin/chmod +x ./install.sh /usr/bin/sudo ./install.sh --version ${{ github.event.pull_request.head.ref }} --config-branch ${{ github.event.pull_request.base.ref }} --docker --no-firewall diff --git a/install.sh b/install.sh index b74950a87..c8c8b8db4 100755 --- a/install.sh +++ b/install.sh @@ -210,7 +210,7 @@ ce_provision: venv_install_username: ${CONTROLLER_USER} upgrade_timer_name: upgrade_ce_provision_ansible aws_support: ${AWS_SUPPORT} - new_user: ${CONTROLLER_USER} + new_user: true username: ${CONTROLLER_USER} ssh_key_bits: "521" ssh_key_type: ed25519 diff --git a/roles/_exit/tasks/main.yml b/roles/_exit/tasks/main.yml index 51d676278..b9dce908d 100644 --- a/roles/_exit/tasks/main.yml +++ b/roles/_exit/tasks/main.yml @@ -3,8 +3,8 @@ - name: Generate/Update custom vars file. ansible.builtin.include_tasks: allowed_vars.yml when: - - _init.ce_provision_extra_repository - - _init.ce_provision_extra_repository_vars_file + - _init.ce_provision_extra_repository | length > 0 + - _init.ce_provision_extra_repository_vars_file | length > 0 - _init.ce_provision_extra_repository_push - name: Store current playbook md5. diff --git a/roles/_init/tasks/main.yml b/roles/_init/tasks/main.yml index c401fefb3..82668f566 100644 --- a/roles/_init/tasks/main.yml +++ b/roles/_init/tasks/main.yml @@ -121,9 +121,9 @@ - name: Load custom vars file. 
ansible.builtin.include_tasks: allowed_vars.yml when: - - _init.ce_provision_extra_repository - - _init.ce_provision_extra_repository_vars_file - - _init.ce_provision_extra_repository_allowed_vars + - _init.ce_provision_extra_repository | length > 0 + - _init.ce_provision_extra_repository_vars_file | length > 0 + - _init.ce_provision_extra_repository_allowed_vars | length > 0 # Install Ansible under the controller user for all servers # Ensure ansible_connection == 'ssh' (i.e. we are connecting to a server) before executing diff --git a/roles/debian/apt_unattended_upgrades/defaults/main.yml b/roles/debian/apt_unattended_upgrades/defaults/main.yml index 855c7f924..a63f3e140 100644 --- a/roles/debian/apt_unattended_upgrades/defaults/main.yml +++ b/roles/debian/apt_unattended_upgrades/defaults/main.yml @@ -4,7 +4,7 @@ _apt_unattended_upgrades_default_origins: - "origin=Debian,codename=${distro_codename}-security,label=Debian-Security" apt_unattended_upgrades: enable: true - linotp: "false" + linotp: false # unattended-upgrades template vars. # booleans must be strings to avoid Jinja2 interpretting. origins: "{{ _apt_unattended_upgrades_default_origins }}" diff --git a/roles/debian/ce_deploy/tasks/main.yml b/roles/debian/ce_deploy/tasks/main.yml index 15f2265dd..526d2bf86 100644 --- a/roles/debian/ce_deploy/tasks/main.yml +++ b/roles/debian/ce_deploy/tasks/main.yml @@ -62,7 +62,7 @@ version: "{{ ce_deploy.config_repository_branch | default('main') }}" become: false delegate_to: localhost - when: ce_deploy.config_repository is defined and ce_deploy.config_repository + when: ce_deploy.config_repository is defined and ce_deploy.config_repository | length > 0 - name: Synchronize config directory. 
ansible.posix.synchronize: @@ -71,7 +71,7 @@ delete: true rsync_opts: - "--chown={{ ce_deploy.username }}:{{ ce_deploy.username }}" - when: ce_deploy.config_repository is defined and ce_deploy.config_repository + when: ce_deploy.config_repository is defined and ce_deploy.config_repository | length > 0 - name: Check if we have a config directory. ansible.builtin.stat: @@ -81,7 +81,7 @@ - name: Register config repository. ansible.builtin.set_fact: key_value: ce_deploy_has_config_repo - ce_deploy_has_config_repo: "{{ 'yes' if ce_deploy_config_repo.stat.isdir is defined and ce_deploy_config_repo.stat.isdir else 'no' }}" + ce_deploy_has_config_repo: "{{ true if ce_deploy_config_repo.stat.isdir is defined and ce_deploy_config_repo.stat.isdir else false }}" - name: Create defaults folders. ansible.builtin.file: diff --git a/roles/debian/ce_provision/tasks/main.yml b/roles/debian/ce_provision/tasks/main.yml index 792eea3d6..0ccd6e680 100644 --- a/roles/debian/ce_provision/tasks/main.yml +++ b/roles/debian/ce_provision/tasks/main.yml @@ -57,11 +57,6 @@ filename: "{{ ce_provision.username }}" when: _ce_provision_username != ce_provision.username -# This prevent the original var to be re-evaluated when we move things around. -- name: Register config repository. - ansible.builtin.set_fact: - ce_provision_has_config_repo: "{{ 'yes' if ce_provision.config_repository else 'no' }}" - - name: Ensure APT dependencies are installed. ansible.builtin.apt: pkg: ["git", "parallel"] @@ -102,7 +97,7 @@ become: true become_user: "{{ ce_provision.username }}" when: - - ce_provision_has_config_repo + - ce_provision.config_repository | length > 0 - not ce_provision.config_repository_skip_checkout - name: Create defaults folders. @@ -111,13 +106,13 @@ state: directory with_items: - hosts - when: not ce_provision_has_config_repo + when: not ce_provision.config_repository | length > 0 - name: Create default config. 
ansible.builtin.copy: src: ansible.cfg dest: "{{ ce_provision.local_dir }}/ansible.cfg" - when: not ce_provision_has_config_repo + when: not ce_provision.config_repository | length > 0 - name: Symlink config folders to /etc/ansible. ansible.builtin.file: @@ -129,7 +124,7 @@ - files - templates - ansible.cfg - when: ce_provision_has_config_repo + when: ce_provision.config_repository | length > 0 - name: Create data dir. ansible.builtin.file: diff --git a/roles/debian/ssh_server/tasks/main.yml b/roles/debian/ssh_server/tasks/main.yml index 8d52d8eee..47c07ed41 100644 --- a/roles/debian/ssh_server/tasks/main.yml +++ b/roles/debian/ssh_server/tasks/main.yml @@ -18,7 +18,7 @@ with_items: "{{ sshd.groups }}" loop_control: loop_var: group - when: sshd.groups | length + when: sshd.groups | length > 0 - name: Generate group section of the sshd_config file. ansible.builtin.blockinfile: @@ -29,7 +29,7 @@ with_items: "{{ sshd.groups }}" loop_control: loop_var: group - when: sshd.groups | length + when: sshd.groups | length > 0 - name: Generate user section of the sshd_config file. ansible.builtin.blockinfile: @@ -40,7 +40,7 @@ with_items: "{{ sshd.users }}" loop_control: loop_var: users - when: sshd.users | length + when: sshd.users | length > 0 # - name: Trigger overrides # include_role: diff --git a/roles/debian/user_ansible/tasks/main.yml b/roles/debian/user_ansible/tasks/main.yml index 93290f410..5f138f26a 100644 --- a/roles/debian/user_ansible/tasks/main.yml +++ b/roles/debian/user_ansible/tasks/main.yml @@ -13,7 +13,7 @@ with_items: "{{ user_ansible.groups }}" loop_control: loop_var: group - when: user_ansible.groups | length + when: user_ansible.groups | length > 0 - name: Create the system user. ansible.builtin.user: @@ -74,7 +74,7 @@ owner: "{{ user_ansible.username }}" group: "{{ user_ansible.username }}" mode: '0600' - when: user_ansible.known_hosts | length + when: user_ansible.known_hosts | length > 0 - name: Add public keys to known_hosts. 
ansible.builtin.known_hosts: From cb636682cd8b8a28d4081948cf1bbe7e1dcf0312 Mon Sep 17 00:00:00 2001 From: Klaus Purer Date: Wed, 10 Sep 2025 13:55:24 +0200 Subject: [PATCH 15/26] feat(php): Add FPM slow logrotate (#2625) * feat(php): Support removal of APCU, add FPM slow logrotate * simplify condition * revert apcu installed setting, not needed From 9f05b904fd7c1e12a7d0a9ec6c457d8faa592946 Mon Sep 17 00:00:00 2001 From: nfawbert <62660788+nfawbert@users.noreply.github.com> Date: Wed, 10 Sep 2025 12:57:51 +0100 Subject: [PATCH 16/26] r73458-install-php-gmp-by-default2 (#2667) * r73458-install-php-gmp-by-default2 * re-add required packages --- roles/debian/php-common/tasks/main.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/roles/debian/php-common/tasks/main.yml b/roles/debian/php-common/tasks/main.yml index 15ff896a6..d9f5ac786 100644 --- a/roles/debian/php-common/tasks/main.yml +++ b/roles/debian/php-common/tasks/main.yml @@ -41,9 +41,12 @@ - name: Install PHP packages. 
ansible.builtin.apt: pkg: + - "php{{ version }}-apcu" + - "php{{ version }}-bcmath" - "php{{ version }}-curl" - "php{{ version }}-dev" - "php{{ version }}-gd" + - "php{{ version }}-gmp" - "php{{ version }}-imap" - "php{{ version }}-ldap" - "php{{ version }}-mbstring" @@ -54,8 +57,6 @@ - "php{{ version }}-soap" - "php{{ version }}-xml" - "php{{ version }}-zip" - - "php{{ version }}-bcmath" - - "php{{ version }}-apcu" state: present with_items: "{{ php.version }}" loop_control: From a89ae4537c013302ee6b1868d05e814c04c6ac98 Mon Sep 17 00:00:00 2001 From: drazenCE <140631110+drazenCE@users.noreply.github.com> Date: Thu, 11 Sep 2025 07:53:22 +0200 Subject: [PATCH 17/26] Wazuh-mitre-report-setup (#2588) * Wazuh-mitre-report-setup * Wazuh-mitre-shellshock-longurl-block * Fixing-vars * Wazuh-mitre-report-setup-PR-2.x --- roles/debian/wazuh/defaults/main.yml | 2 +- roles/debian/wazuh/tasks/main.yml | 52 ++++++++++++++++--- .../templates/generate_weekly_report.sh.j2 | 46 ++++++++++++++++ 3 files changed, 92 insertions(+), 8 deletions(-) create mode 100644 roles/debian/wazuh/templates/generate_weekly_report.sh.j2 diff --git a/roles/debian/wazuh/defaults/main.yml b/roles/debian/wazuh/defaults/main.yml index c98a57e03..fd28d0ffe 100644 --- a/roles/debian/wazuh/defaults/main.yml +++ b/roles/debian/wazuh/defaults/main.yml @@ -92,7 +92,7 @@ wazuh: active_responses: - command: "firewall-drop" location: "all" - rules_id: "31151,5712,104130,101071,101132,101238,101251,103011" + rules_id: "31115,31151,31168,5712,104130,101071,101132,101238,101251,103011" repeated_offenders: "30,60,120" timeout: 600 - command: "firewall-drop" diff --git a/roles/debian/wazuh/tasks/main.yml b/roles/debian/wazuh/tasks/main.yml index 808b9b77d..e37e70b29 100644 --- a/roles/debian/wazuh/tasks/main.yml +++ b/roles/debian/wazuh/tasks/main.yml @@ -139,6 +139,10 @@ ignore_errors: true changed_when: false +- name: Set fact if wazuh-manager service exists + ansible.builtin.set_fact: + wazuh_manager_exists: "{{ 
'wazuh-manager.service' in wazuh_service.stdout }}" + - name: Deploy custom Wazuh local rules ansible.builtin.copy: src: custom_wazuh_rules.xml @@ -149,7 +153,7 @@ notify: restart wazuh-manager tags: - rules - when: "'wazuh-manager.service' in wazuh_service.stdout" + when: wazuh_manager_exists - name: Write the password to /var/ossec/etc/authd.pass ansible.builtin.copy: @@ -158,16 +162,50 @@ mode: '0640' owner: root group: wazuh - when: "'wazuh-manager.service' in wazuh_service.stdout or 'wazuh-agent.service' in wazuh_service.stdout" + when: wazuh_manager_exists or 'wazuh-agent.service' in wazuh_service.stdout -- name: Restart wazuh-manager to apply changes - ansible.builtin.systemd_service: +- name: Restart wazuh-manager to apply changes. + ansible.builtin.systemd: name: wazuh-manager state: restarted - when: "'wazuh-manager.service' in wazuh_service.stdout" + when: wazuh_manager_exists -- name: Restart wazuh-agent to apply changes - ansible.builtin.systemd_service: +- name: Restart wazuh-agent to apply changes. + ansible.builtin.systemd: name: wazuh-agent state: restarted when: "'wazuh-agent.service' in wazuh_service.stdout" + +- name: Read filebeat.yml content. + ansible.builtin.shell: | + set -o pipefail && awk -F'"' '/password:/ {print $2}' {{ wazuh.mitre_report.password_file }} + register: _wazuh_filebeat_password + no_log: true + args: + executable: /bin/bash + when: wazuh_manager_exists + +- name: Set password fact. + ansible.builtin.set_fact: + filebeat_password: "{{ _wazuh_filebeat_password.stdout }}" + no_log: true + when: wazuh_manager_exists + +- name: Deploy the weekly report script. + ansible.builtin.template: + src: generate_weekly_report.sh.j2 + dest: /usr/local/bin/generate_weekly_report.sh + owner: root + group: root + mode: '0755' + when: wazuh_manager_exists + +- name: Ensure weekly report cron job is present. 
+ ansible.builtin.cron: + name: "Weekly OpenSearch report generation" + user: root + minute: 0 + hour: 2 + weekday: 1 # Monday + job: "/usr/local/bin/generate_weekly_report.sh >> /var/log/opensearch-reports.log 2>&1" + when: wazuh_manager_exists diff --git a/roles/debian/wazuh/templates/generate_weekly_report.sh.j2 b/roles/debian/wazuh/templates/generate_weekly_report.sh.j2 new file mode 100644 index 000000000..eb93662e1 --- /dev/null +++ b/roles/debian/wazuh/templates/generate_weekly_report.sh.j2 @@ -0,0 +1,46 @@ +#!/bin/bash + +# This script generates a PDF report from wazuh-dashboard visualization and emails it + +# Set variables +REPORT_DATE=$(date +"%Y-%m-%d") +REPORT_NAME="weekly-report-${REPORT_DATE}" +LOG_FILE="/var/log/opensearch-reports.log" +USERNAME="{{ wazuh.mitre_report.username }}" +PASSWORD="{{ filebeat_password }}" + +# Function to log messages +log_message() { + echo "$(date '+%Y-%m-%d %H:%M:%S') - $1" | tee -a "$LOG_FILE" +} + +log_message "Starting weekly report generation" + +# Generate and send the report +opensearch-reporting-cli \ + -u "{{ wazuh.mitre_report.visualization_url }}" \ + -a basic \ + -c "$USERNAME:$PASSWORD" \ + --selfsignedcerts true \ + -f pdf \ + -n "$REPORT_NAME" \ + -e smtp \ + -s "{{ wazuh.mitre_report.e-mail_from }}" \ + -r "{{ wazuh.manager.wazuh_manager_mailto}}" \ + --subject "Weekly OpenSearch Report - $(date '+%B %d, %Y')" \ + --note "Hi,\n\nPlease find attached the weekly Wazuh Mitre report covering the last 7 days.\n\nReport generated on: $(date '+%Y-%m-%d %H:%M:%S')\n\nBest regards,\nAutomated Reporting System" \ + --smtphost localhost \ + --smtpport 25 + +# Check if the command was successful +if [ $? 
-eq 0 ]; then + log_message "Weekly report generated and sent successfully" +else + log_message "ERROR: Failed to generate or send weekly report" + exit 1 +fi + +# Optional: Clean up old report files (keep last 2 weeks) +find /tmp -name "weekly-report-*.pdf" -mtime +14 -delete 2>/dev/null + +log_message "Weekly report process completed" From 71278e9146b6238bc19ee274f5c8ceb9430d087b Mon Sep 17 00:00:00 2001 From: drazenCE <140631110+drazenCE@users.noreply.github.com> Date: Thu, 11 Sep 2025 09:26:35 +0200 Subject: [PATCH 18/26] Wazuh mitre report setup pr 2.x (#2669) * Wazuh-mitre-report-setup * Wazuh-mitre-shellshock-longurl-block * Fixing-vars * Wazuh-mitre-report-setup-PR-2.x * Wazuh-mitre-report-setup-PR-2.x --- roles/debian/wazuh/templates/generate_weekly_report.sh.j2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/roles/debian/wazuh/templates/generate_weekly_report.sh.j2 b/roles/debian/wazuh/templates/generate_weekly_report.sh.j2 index eb93662e1..de717209a 100644 --- a/roles/debian/wazuh/templates/generate_weekly_report.sh.j2 +++ b/roles/debian/wazuh/templates/generate_weekly_report.sh.j2 @@ -25,7 +25,7 @@ opensearch-reporting-cli \ -f pdf \ -n "$REPORT_NAME" \ -e smtp \ - -s "{{ wazuh.mitre_report.e-mail_from }}" \ + -s "{{ wazuh.mitre_report.e_mail_from }}" \ -r "{{ wazuh.manager.wazuh_manager_mailto}}" \ --subject "Weekly OpenSearch Report - $(date '+%B %d, %Y')" \ --note "Hi,\n\nPlease find attached the weekly Wazuh Mitre report covering the last 7 days.\n\nReport generated on: $(date '+%Y-%m-%d %H:%M:%S')\n\nBest regards,\nAutomated Reporting System" \ From 65a6a0dd23f3512f3b16deead555ea0504553b13 Mon Sep 17 00:00:00 2001 From: tymofiisobchenko <104431720+tymofiisobchenko@users.noreply.github.com> Date: Fri, 12 Sep 2025 16:55:54 +0300 Subject: [PATCH 19/26] pin_ansible_version (#2671) * pin_ansible_version * pin_ansible_version * pin_ansible_version * pin_ansible_version * pin_ansible_version_fix_upgrade_timer * 
pin_ansible_version_fix_upgrade_timer * pin_ansible_version_fix_upgrade_timer * pin_ansible_version_disable_upgrade_timer * pin_ansible_version_disable_upgrade_timer * pin_ansible_version_disable_upgrade_timer * pin_ansible_version_disable_upgrade_timer --- install.sh | 3 ++- roles/debian/ansible/defaults/main.yml | 3 ++- roles/debian/ansible/tasks/main.yml | 2 +- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/install.sh b/install.sh index c8c8b8db4..c11219cbd 100755 --- a/install.sh +++ b/install.sh @@ -84,6 +84,7 @@ FIREWALL="true" AWS_SUPPORT="false" IS_LOCAL="false" SERVER_HOSTNAME=$(hostname) +ANSIBLE_VERSION="<12" # Parse options. parse_options "$@" @@ -146,7 +147,7 @@ fi /usr/bin/echo "-------------------------------------------------" /usr/bin/su - "$CONTROLLER_USER" -c "/usr/bin/python3 -m venv /home/$CONTROLLER_USER/ce-python" /usr/bin/su - "$CONTROLLER_USER" -c "/home/$CONTROLLER_USER/ce-python/bin/python3 -m pip install --upgrade pip" -/usr/bin/su - "$CONTROLLER_USER" -c "/home/$CONTROLLER_USER/ce-python/bin/pip install ansible netaddr python-debian" +/usr/bin/su - "$CONTROLLER_USER" -c "/home/$CONTROLLER_USER/ce-python/bin/pip install 'ansible$ANSIBLE_VERSION' netaddr python-debian" if [ "$AWS_SUPPORT" = "true" ]; then /usr/bin/su - "$CONTROLLER_USER" -c "/home/$CONTROLLER_USER/ce-python/bin/pip install boto3" fi diff --git a/roles/debian/ansible/defaults/main.yml b/roles/debian/ansible/defaults/main.yml index e7256e5f8..47707d7d0 100644 --- a/roles/debian/ansible/defaults/main.yml +++ b/roles/debian/ansible/defaults/main.yml @@ -4,8 +4,9 @@ ce_ansible: #venv_path: "/home/{{ ce_provision.username }}/ansible" #venv_command: /usr/bin/python3.11 -m venv #venv_install_username: ansible # user to become when creating venv + ansible_version: "<12.0" # also check install.sh script in the repo root and set the version there accordingly. 
upgrade: - enabled: true # create systemd timer to auto-upgrade Ansible + enabled: false # create systemd timer to auto-upgrade Ansible. Temporary disabled due to ansible 2.19 breaking changes. command: "{{ _venv_path }}/bin/python3 -m pip install --upgrade ansible" # if you set venv_path above then set it here too on_calendar: "*-*-* 01:30:00" # see systemd.time documentation - https://www.freedesktop.org/software/systemd/man/latest/systemd.time.html#Calendar%20Events #timer_name: upgrade_ansible diff --git a/roles/debian/ansible/tasks/main.yml b/roles/debian/ansible/tasks/main.yml index 57af8cbf0..cdf6d0862 100644 --- a/roles/debian/ansible/tasks/main.yml +++ b/roles/debian/ansible/tasks/main.yml @@ -60,7 +60,7 @@ packages: - name: pip state: latest - - name: ansible + - name: "ansible{{ ce_ansible.ansible_version }}" - name: python-debian - name: Install linters. From 854a245a4c7b47690316cc1060c7432be75d8311 Mon Sep 17 00:00:00 2001 From: drazenCE <140631110+drazenCE@users.noreply.github.com> Date: Tue, 16 Sep 2025 10:11:46 +0200 Subject: [PATCH 20/26] Fixing-ce-provision-vars (#2678) --- roles/debian/ce_provision/tasks/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/roles/debian/ce_provision/tasks/main.yml b/roles/debian/ce_provision/tasks/main.yml index 0ccd6e680..6d65d25f9 100644 --- a/roles/debian/ce_provision/tasks/main.yml +++ b/roles/debian/ce_provision/tasks/main.yml @@ -16,7 +16,7 @@ with_items: "{{ ce_provision.groups }}" loop_control: loop_var: group - when: ce_provision.groups | length + when: ce_provision.groups is defined and ce_provision.groups | length > 0 # User normally created already in the _init role. - name: Generate SSH key for the controller user for provisioning. 
From 18502308d09a499d5a22fae282648963c2c54dd5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Matej=20=C5=A0tajduhar?= <30931414+matej5@users.noreply.github.com> Date: Wed, 17 Sep 2025 10:43:12 +0200 Subject: [PATCH 21/26] Updating-string (#2507) * Updating-string * Updating-string-3 --------- Co-authored-by: Matej Stajduhar --- roles/aws/aws_admin_tools/tasks/create_methods.yml | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/roles/aws/aws_admin_tools/tasks/create_methods.yml b/roles/aws/aws_admin_tools/tasks/create_methods.yml index e06acf07a..7dfff713e 100644 --- a/roles/aws/aws_admin_tools/tasks/create_methods.yml +++ b/roles/aws/aws_admin_tools/tasks/create_methods.yml @@ -95,14 +95,18 @@ {{ '--credentials "arn:aws:iam::' + _acc_id + ':role/api_get_s3"' if item.resource == 's3' else '' }} when: item.url_params is not defined or item.url_params | length == 0 -- name: Generate template parts for each param - set_fact: - template_parts: "{{ item.url_params | map('regex_replace', '^(.*)$', '\\\"\\1\\\": \\\"$input.params(''\\1'')\\\"') | list }}" +- name: Generate URL parameters string + ansible.builtin.set_fact: + url_params_string: >- + {% for _url in item.url_params %} + {{ '' if loop.first else ',' }} + \"{{ _url }}\": \"$input.params('{{ _url }}')\" + {% endfor %} when: item.url_params is defined and item.url_params | length > 0 - name: Create final template string set_fact: - template_string: "{ \"application/json\": \"{ {{ template_parts | join(',') }} }\" }" + template_string: "{ \"application/json\": \"{ {{ url_params_string }} }\" }" when: item.url_params is defined and item.url_params | length > 0 - name: Write template to file From d62d4e6b5e2023118839a57461b20df464543226 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Matej=20=C5=A0tajduhar?= <30931414+matej5@users.noreply.github.com> Date: Thu, 18 Sep 2025 13:33:18 +0200 Subject: [PATCH 22/26] Added-tasks-to-backup-Aurora-and-copy-AMI-to-safe-region (#2682) * 
Added-tasks-to-backup-Aurora-and-copy-AMI-to-safe-region * Fixing-aurora-backup-tasks * Fixing-aurora-backup-tasks-2 * Fixing-aurora-backup-tasks-3 * Fixing-aurora-backup-tasks-5 * Adding-aurora-template * Updating-aurora-vars * Adding-handler-to-defaults-for-CF --------- Co-authored-by: Matej Stajduhar --- roles/aws/aws_backup/tasks/resource.yml | 3 +- .../tasks/testing_resources.yml | 47 +++++++++++-------- .../templates/AURORA_restore_testing.j2 | 16 +++++++ .../defaults/main.yml | 1 + .../aws_ec2_autoscale_cluster/tasks/main.yml | 7 +++ roles/aws/aws_rds/tasks/main.yml | 15 ++++++ 6 files changed, 68 insertions(+), 21 deletions(-) create mode 100644 roles/aws/aws_backup_validation/templates/AURORA_restore_testing.j2 diff --git a/roles/aws/aws_backup/tasks/resource.yml b/roles/aws/aws_backup/tasks/resource.yml index 44924f6ff..c939a5805 100644 --- a/roles/aws/aws_backup/tasks/resource.yml +++ b/roles/aws/aws_backup/tasks/resource.yml @@ -52,8 +52,9 @@ instance: "ec2" file-system: "elasticfilesystem" db: "rds" + cluster: "rds" ansible.builtin.set_fact: - _resource_arn: "arn:aws:{{ arn_construct[backup.resource_type] }}:{{ _aws_region }}:{{ caller_info.account }}:{{ backup.resource_type }}{% if backup.resource_type == 'db' %}:{% else %}/{% endif %}{{ backup.resource_id }}" + _resource_arn: "arn:aws:{{ arn_construct[backup.resource_type] }}:{{ _aws_region }}:{{ caller_info.account }}:{{ backup.resource_type }}{% if backup.resource_type == 'db' or backup.resource_type == 'cluster' %}:{% else %}/{% endif %}{{ backup.resource_id }}" - name: Check if the resource selection exists. 
ansible.builtin.command: > diff --git a/roles/aws/aws_backup_validation/tasks/testing_resources.yml b/roles/aws/aws_backup_validation/tasks/testing_resources.yml index 56fd50eda..63c79421d 100644 --- a/roles/aws/aws_backup_validation/tasks/testing_resources.yml +++ b/roles/aws/aws_backup_validation/tasks/testing_resources.yml @@ -30,26 +30,32 @@ register: _main_subnets_info - name: Create SG for restored instances. - amazon.aws.ec2_security_group: - name: Restore_testing - description: This SG is used to allow SSM and SSH access to the server - region: "{{ _aws_region }}" - vpc_id: "{{ _main_vpc_info.vpcs[0].vpc_id }}" - rules: - - proto: tcp - from_port: 80 - to_port: 80 - cidr_ip: 0.0.0.0/0 - - proto: tcp - from_port: 443 - to_port: 443 - cidr_ip: 0.0.0.0/0 - - proto: tcp - from_port: 22 - to_port: 22 - cidr_ip: 0.0.0.0/0 - rules_egress: [] - register: _restore_testing_sg + ansible.builtin.include_role: + name: aws/aws_vpc + tasks_from: security_group + vars: + aws_vpc: + name: "Restore_testing" + region: "{{ aws_ec2_autoscale_cluster.region }}" + id: "{{ _main_vpc_info.vpcs[0].vpc_id }}" + description: "This SG is used to allow SSM and SSH access to the server" + rules: + - proto: tcp + from_port: 80 + to_port: 80 + cidr_ip: 0.0.0.0/0 + - proto: tcp + from_port: 443 + to_port: 443 + cidr_ip: 0.0.0.0/0 + - proto: tcp + from_port: 22 + to_port: 22 + cidr_ip: 0.0.0.0/0 + +- name: Construct AWS instance type dict. + ansible.builtin.set_fact: + _restore_testing_sg: "{{ aws_vpc._result['Restore_testing'] }}" - name: Remove restore testing query file. ansible.builtin.file: @@ -62,6 +68,7 @@ instance: "EC2" file-system: "EFS" db: "RDS" + cluster: "AURORA" - name: Set instance type for template. 
ansible.builtin.set_fact: diff --git a/roles/aws/aws_backup_validation/templates/AURORA_restore_testing.j2 b/roles/aws/aws_backup_validation/templates/AURORA_restore_testing.j2 new file mode 100644 index 000000000..1cb7e06f7 --- /dev/null +++ b/roles/aws/aws_backup_validation/templates/AURORA_restore_testing.j2 @@ -0,0 +1,16 @@ +{ + "RestoreTestingPlanName": "{{ _testing_plan_info.stdout | from_json | json_query("RestoreTestingPlanName") }}", + "RestoreTestingSelection": { + "IamRoleArn": "{{ _default_backup_role_arn.iam_roles[0].arn }}", + "ProtectedResourceArns": [ + "{{ _resource_arn }}" + ], + "ProtectedResourceType": "{{ _instance_type_restore }}", + "RestoreMetadataOverrides": { + "vpcSecurityGroupIds": "[\"{{ _restore_testing_sg.group_id }}\"]", + "dbsubnetgroupname": "{{ aws_rds.name }}" + }, + "RestoreTestingSelectionName": "{{ backup.selection_name | replace("-", "_") }}", + "ValidationWindowHours": 1 + } +} diff --git a/roles/aws/aws_cloudfront_distribution/defaults/main.yml b/roles/aws/aws_cloudfront_distribution/defaults/main.yml index f264cac35..66b17fd9d 100644 --- a/roles/aws/aws_cloudfront_distribution/defaults/main.yml +++ b/roles/aws/aws_cloudfront_distribution/defaults/main.yml @@ -12,6 +12,7 @@ aws_cloudfront_distribution: # description: "This is example function." 
# runtime: "nodejs22.x" # lambda runtimes can be found here https://docs.aws.amazon.com/lambda/latest/dg/lambda-runtimes.html # code: "lambda-function-example.zip" # Name of the function file in files folder next to plays and vars, can handle git URLs +# handler: "main_file.main_function" aws_profile: "{{ _aws_profile }}" region: "{{ _aws_region }}" tags: {} diff --git a/roles/aws/aws_ec2_autoscale_cluster/tasks/main.yml b/roles/aws/aws_ec2_autoscale_cluster/tasks/main.yml index e8f63de73..36ed43672 100644 --- a/roles/aws/aws_ec2_autoscale_cluster/tasks/main.yml +++ b/roles/aws/aws_ec2_autoscale_cluster/tasks/main.yml @@ -772,3 +772,10 @@ when: - aws_ec2_autoscale_cluster.route_53.zone is defined - aws_ec2_autoscale_cluster.route_53.zone | length > 0 + +- name: Copy AMI to backup region. + community.aws.ec2_ami_copy: + aws_profile: "{{ aws_ec2_autoscale_cluster.aws_profile }}" + source_region: "{{ aws_ec2_autoscale_cluster.region }}" + region: "{{ aws_backup.copy_vault.region }}" + source_image_id: "{{ aws_ec2_autoscale_cluster_image_latest }}" diff --git a/roles/aws/aws_rds/tasks/main.yml b/roles/aws/aws_rds/tasks/main.yml index 28aff345a..602f13439 100644 --- a/roles/aws/aws_rds/tasks/main.yml +++ b/roles/aws/aws_rds/tasks/main.yml @@ -233,3 +233,18 @@ - aws_rds.backup is defined - aws_rds.backup | length > 0 - "'aurora' not in aws_rds.engine" + +- name: Assign Aurora resource to backup plan. 
+ ansible.builtin.include_role: + name: aws/aws_backup + tasks_from: resource + vars: + backup: + backup_plan_name: "{{ aws_rds.backup }}" + selection_name: "AURORA-{{ aws_rds.name }}-{{ _env_type }}" + resource_id: "{{ aws_rds.name }}" + resource_type: "cluster" + when: + - aws_rds.backup is defined + - aws_rds.backup | length > 0 + - "'aurora' in aws_rds.engine" From 6717286e0747165947d5ccf9b9cfb8b05c3133d2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Matej=20=C5=A0tajduhar?= <30931414+matej5@users.noreply.github.com> Date: Thu, 18 Sep 2025 15:10:52 +0200 Subject: [PATCH 23/26] SG-creation-update (#2605) * SG-creation-update * Updating-lambda-tasks-to-handle-various-file-options * Updating-lambda-tasks-for-url-handling * Updating-aws_admin_tools-for-aws_lambda * Updating-aws_admin_tools-for-aws_lambda * Setting-loop-item * Setting-loop-item-2 * Updating-vpc-sec-group-vars * Removing-extra-vars-for-git-module * Adding-default-for-git_url * Cleaning-up-tasks * Updating-ansible-lint * Updating-ansible-lint * Ommiting-name-if-no-sec_group-name-defined * Removing-loop-var --------- Co-authored-by: Matej Stajduhar --- .ansible-lint | 3 + roles/aws/aws_admin_tools/defaults/main.yml | 1 + .../tasks/lambda_functions.yml | 12 +++- .../templates/api_change_asg_scaling.py.j2 | 30 --------- .../templates/api_get_acl_list.py.j2 | 63 ------------------- .../templates/api_get_forecasted_costs.py.j2 | 39 ------------ .../templates/api_get_ip_set.py.j2 | 21 ------- .../templates/api_update_ip_set.py.j2 | 19 ------ roles/aws/aws_ami/tasks/repack.yml | 34 +++++----- .../aws_ec2_autoscale_cluster/tasks/main.yml | 32 ++++++---- roles/aws/aws_lambda/tasks/handle_single.yml | 15 +++++ roles/aws/aws_lambda/tasks/handle_url.yml | 33 ++++++++++ roles/aws/aws_lambda/tasks/handle_zip.yml | 7 +++ roles/aws/aws_lambda/tasks/main.yml | 19 +++--- roles/aws/aws_vpc/tasks/main.yml | 21 +++---- roles/aws/aws_vpc/tasks/security_group.yml | 23 +++++-- roles/aws/aws_vpc_subnet/tasks/subnet.yml | 27 
++++---- 17 files changed, 159 insertions(+), 240 deletions(-) delete mode 100644 roles/aws/aws_admin_tools/templates/api_change_asg_scaling.py.j2 delete mode 100644 roles/aws/aws_admin_tools/templates/api_get_acl_list.py.j2 delete mode 100644 roles/aws/aws_admin_tools/templates/api_get_forecasted_costs.py.j2 delete mode 100644 roles/aws/aws_admin_tools/templates/api_get_ip_set.py.j2 delete mode 100644 roles/aws/aws_admin_tools/templates/api_update_ip_set.py.j2 create mode 100644 roles/aws/aws_lambda/tasks/handle_single.yml create mode 100644 roles/aws/aws_lambda/tasks/handle_url.yml create mode 100644 roles/aws/aws_lambda/tasks/handle_zip.yml diff --git a/.ansible-lint b/.ansible-lint index 02d2d1c31..040449dff 100644 --- a/.ansible-lint +++ b/.ansible-lint @@ -14,5 +14,8 @@ skip_list: - template-instead-of-copy # to skip over roles/ssl/tasks/copy.yml errors, temporarily. - name[template] # it doesn't like Jinja templates being in the middle of a task name, which seems silly to me. - name[casing] # sometimes included Galaxy roles break linting rules and cause failures + - args[module] # causing odd issue with ACL role + - jinja[spacing] # pendantic! 
we get these from GitHub Actions anyway + - latest[git] # Breaks if there is no version control in task exclude_paths: - roles/contrib/ # we don't control these roles diff --git a/roles/aws/aws_admin_tools/defaults/main.yml b/roles/aws/aws_admin_tools/defaults/main.yml index b13c2aff1..40b8c7ddb 100644 --- a/roles/aws/aws_admin_tools/defaults/main.yml +++ b/roles/aws/aws_admin_tools/defaults/main.yml @@ -7,6 +7,7 @@ aws_admin_tools: - name: "change_asg_scaling" # Name used for creating API Gateway and Lambda functions resource: api # Refers to type of resource for needed function (can be api, schedule and s3) type: POST # Type of HTTP method, can be GET and POST + git_url: "git@codeenigma.net:functions/example_function.git" # If git_url is provided it will be used to download code from gitlab/github policies: [] # List of policies to use for lambda function inline_policies: # Inline policies to allow defining least privilages access name: "change_asg_scaling" # Name of inline policies diff --git a/roles/aws/aws_admin_tools/tasks/lambda_functions.yml b/roles/aws/aws_admin_tools/tasks/lambda_functions.yml index e5be0b24f..0c04382b5 100644 --- a/roles/aws/aws_admin_tools/tasks/lambda_functions.yml +++ b/roles/aws/aws_admin_tools/tasks/lambda_functions.yml @@ -3,6 +3,16 @@ name: "{{ item.resource }}_{{ item.name }}" register: _iam_lambda +- name: Set previous command output into variable. + ansible.builtin.set_fact: + _function_file: "{{ lookup('template', item.resource + '_' + item.name + '.py.j2') }}" + when: item.git_url is not defined + +- name: Set previous command output into variable. + ansible.builtin.set_fact: + _function_file: "{{ item.git_url }}" + when: item.git_url is defined + - name: Create Lambda function. 
ansible.builtin.include_role: name: aws/aws_lambda @@ -13,7 +23,7 @@ timeout: "{{ item.timeout | default(aws_admin_tools.timeout) }}" role: "{{ aws_iam_role._result[item.resource + '_' + item.name] }}" runtime: "{{ aws_admin_tools.runtime }}" - function_file: "{{ lookup('template', item.resource + '_' + item.name + '.py.j2') }}" + function_file: "{{ _function_file }}" s3_bucket: "{{ _general_bucket }}" s3_bucket_prefix: "lambda-functions" tags: diff --git a/roles/aws/aws_admin_tools/templates/api_change_asg_scaling.py.j2 b/roles/aws/aws_admin_tools/templates/api_change_asg_scaling.py.j2 deleted file mode 100644 index 2cfc32e09..000000000 --- a/roles/aws/aws_admin_tools/templates/api_change_asg_scaling.py.j2 +++ /dev/null @@ -1,30 +0,0 @@ -import json -import boto3 - -asg_cli = boto3.client('autoscaling', region_name="{{ _aws_region }}") - -def lambda_handler(event, context): - - policies = asg_cli.describe_policies( - AutoScalingGroupName=event['asg_name'] - ) - - if policies['ScalingPolicies'][0]['Enabled']: - enable = False - else: - enable = True - - for inst in policies['ScalingPolicies']: - put_result = asg_cli.put_scaling_policy( - AutoScalingGroupName=inst['AutoScalingGroupName'], - PolicyName=inst['PolicyName'], - PolicyType=inst['PolicyType'], - AdjustmentType=inst['AdjustmentType'], - ScalingAdjustment=inst['ScalingAdjustment'], - Enabled=enable - ) - - return { - 'statusCode': 200, - 'body': event - } diff --git a/roles/aws/aws_admin_tools/templates/api_get_acl_list.py.j2 b/roles/aws/aws_admin_tools/templates/api_get_acl_list.py.j2 deleted file mode 100644 index 6271f2acf..000000000 --- a/roles/aws/aws_admin_tools/templates/api_get_acl_list.py.j2 +++ /dev/null @@ -1,63 +0,0 @@ -import json -import boto3 - -waf_regional = boto3.client("wafv2", region_name="{{ _aws_region }}") -waf_cf = boto3.client("wafv2", region_name="us-east-1") -cf_client = boto3.client('cloudfront', region_name="us-east-1") - -def get_rules(waf_client, acl_name, acl_id, scope): - 
rule_details = waf_client.get_web_acl(Name=acl_name, Scope=scope, Id=acl_id) - return [ - { - 'Name': rule['Name'], - 'Priority': rule['Priority'] - } - for rule in rule_details['WebACL']['Rules'] - ] - -def get_cf_associations(cf_client, web_acl_arn): - dist_list = cf_client.list_distributions_by_web_acl_id(WebACLId=web_acl_arn) - return [item['DomainName'] for item in dist_list.get('DistributionList', {}).get('Items', [])] - -def get_regional_associations(waf_client, web_acl_arn): - associations = [] - for res_type in ['APPLICATION_LOAD_BALANCER', 'API_GATEWAY']: - res_list = waf_client.list_resources_for_web_acl(WebACLArn=web_acl_arn, ResourceType=res_type) - if res_list.get('ResourceArns'): - associations.append({res_type: res_list['ResourceArns']}) - return associations - -def get_web_acls(waf_client, scope, include_cf_associations=False, cf_client=None): - response = waf_client.list_web_acls(Scope=scope) - web_acls = [] - - for acl in response['WebACLs']: - rules = get_rules(waf_client, acl['Name'], acl['Id'], scope) - associations = ( - get_cf_associations(cf_client, acl['ARN']) if include_cf_associations - else get_regional_associations(waf_client, acl['ARN']) - ) - web_acls.append({ - 'Name': acl['Name'], - 'Id': acl['Id'], - 'Rules': rules, - 'Association': associations - }) - return web_acls - -def lambda_handler(event, context): - # CloudFront ACLs (Global Scope) - cf_acls = get_web_acls(waf_cf, scope='CLOUDFRONT', include_cf_associations=True, cf_client=cf_client) - - # Regional ACLs (EU-West-1) - regional_acls = get_web_acls(waf_regional, scope='REGIONAL') - - return { - 'statusCode': 200, - 'ACLs': { - 'CloudFront': cf_acls, - 'Regional': { - "{{ _aws_region }}": regional_acls - } - } - } diff --git a/roles/aws/aws_admin_tools/templates/api_get_forecasted_costs.py.j2 b/roles/aws/aws_admin_tools/templates/api_get_forecasted_costs.py.j2 deleted file mode 100644 index 6bed7668b..000000000 --- 
a/roles/aws/aws_admin_tools/templates/api_get_forecasted_costs.py.j2 +++ /dev/null @@ -1,39 +0,0 @@ -import json -import calendar -from datetime import datetime -import boto3 - -costExpl = boto3.client('ce') - -def lambda_handler(event, context): - currDay=datetime.now().day - currMonth=datetime.now().month - print(currMonth) - currYear=datetime.now().year - print(currYear) - lastDay=calendar.monthrange(currYear, currMonth) - - if currMonth < 10: - currMonth = '0' + str(currMonth) - nextDay = currDay + 1 - if currDay < 10: - currDay = '0' + str(currDay) - if nextDay < 10: - nextDay = '0' + str(nextDay) - - startDate=str(currYear) + '-' + str(currMonth) + '-' + str(currDay) - endDate=str(currYear) + '-' + str(currMonth) + '-' + str(nextDay) - - estimatedCost = costExpl.get_cost_forecast( - TimePeriod={ - 'Start': startDate, - 'End': endDate - }, - Granularity='MONTHLY', - Metric='BLENDED_COST' - ) - return { - 'statusCode': 200, - 'Amount': estimatedCost['Total']['Amount'] + ' ' + estimatedCost['Total']['Unit'], - 'Between': estimatedCost['ForecastResultsByTime'][0]['TimePeriod']['Start'] + ' - ' + estimatedCost['ForecastResultsByTime'][0]['TimePeriod']['End'] - } diff --git a/roles/aws/aws_admin_tools/templates/api_get_ip_set.py.j2 b/roles/aws/aws_admin_tools/templates/api_get_ip_set.py.j2 deleted file mode 100644 index c44843bda..000000000 --- a/roles/aws/aws_admin_tools/templates/api_get_ip_set.py.j2 +++ /dev/null @@ -1,21 +0,0 @@ -import json -import boto3 - -waf_cli = boto3.client("wafv2") - -def lambda_handler(event, context): - - print("Gathering instance details.") - ip_set=waf_cli.get_ip_set( - Name=event['set_name'], - Scope='REGIONAL', - Id=event['id'] - ) - - return { - 'statusCode': 200, - 'name': ip_set['IPSet']['Name'], - 'id': ip_set['IPSet']['Id'], - 'addresses': ip_set['IPSet']['Addresses'], - 'lock_token': ip_set['LockToken'], - } diff --git a/roles/aws/aws_admin_tools/templates/api_update_ip_set.py.j2 
b/roles/aws/aws_admin_tools/templates/api_update_ip_set.py.j2 deleted file mode 100644 index 08781fb2b..000000000 --- a/roles/aws/aws_admin_tools/templates/api_update_ip_set.py.j2 +++ /dev/null @@ -1,19 +0,0 @@ -import json -import boto3 - -waf_cli = boto3.client("wafv2") - -def lambda_handler(event, context): - - response = waf_cli.update_ip_set( - Name=event['name'], - Scope=event['scope'], - Id=event['id'], - Addresses=event['addresses'], - LockToken=event['lock_token'] -) - - return { - 'statusCode': 200, - 'body': response - } diff --git a/roles/aws/aws_ami/tasks/repack.yml b/roles/aws/aws_ami/tasks/repack.yml index 47ba4904d..9a9c899ca 100644 --- a/roles/aws/aws_ami/tasks/repack.yml +++ b/roles/aws/aws_ami/tasks/repack.yml @@ -9,21 +9,25 @@ register: aws_ami_running_instances - name: Create a Security Group to access the controller. - amazon.aws.ec2_security_group: - profile: "{{ aws_ami.aws_profile }}" - region: "{{ aws_ami.region }}" - name: "{{ aws_ami.repack.cluster_name }}-repacker" - tags: "{{ aws_ami.tags }}" - state: present - vpc_id: "{{ aws_ami.repack.vpc_id }}" - description: "Allow controller to access the {{ aws_ami.ami_name }}-repacking instance" - rules: - - proto: tcp - ports: - - 22 - cidr_ip: "{{ aws_ami.repack.controller_cidr }}" - rule_desc: "Allow controller to access the {{ aws_ami.ami_name }}-repacking instance" - rules_egress: [] + ansible.builtin.include_role: + name: aws/aws_vpc + tasks_from: security_group + vars: + aws_vpc: + profile: "{{ aws_ami.aws_profile }}" + region: "{{ aws_ami.region }}" + name: "{{ aws_ami.repack.cluster_name }}-repacker" + tags: "{{ aws_ami.tags }}" + state: present + id: "{{ aws_ami.repack.vpc_id }}" + description: "Allow controller to access the {{ aws_ami.ami_name }}-repacking instance" + rules: + - proto: tcp + ports: + - 22 + cidr_ip: "{{ aws_ami.repack.controller_cidr }}" + rule_desc: "Allow controller to access the {{ aws_ami.ami_name }}-repacking instance" + rules_egress: [] - name: Create an AMI 
with an existing EC2 instance. amazon.aws.ec2_ami: diff --git a/roles/aws/aws_ec2_autoscale_cluster/tasks/main.yml b/roles/aws/aws_ec2_autoscale_cluster/tasks/main.yml index 36ed43672..9ea852e3c 100644 --- a/roles/aws/aws_ec2_autoscale_cluster/tasks/main.yml +++ b/roles/aws/aws_ec2_autoscale_cluster/tasks/main.yml @@ -30,23 +30,29 @@ when: aws_ec2_autoscale_cluster.vpc_name is not defined or (aws_ec2_autoscale_cluster.vpc_name | length) == 0 - name: Create matching Security Group. - amazon.aws.ec2_security_group: - profile: "{{ aws_ec2_autoscale_cluster.aws_profile }}" - region: "{{ aws_ec2_autoscale_cluster.region }}" - name: "{{ aws_ec2_autoscale_cluster.name }}" - tags: "{{ aws_ec2_autoscale_cluster.tags | combine({'Name': aws_ec2_autoscale_cluster.name}) }}" - state: "{{ aws_ec2_autoscale_cluster.state }}" - vpc_id: "{{ _aws_ec2_autoscale_cluster_vpc_id }}" - description: "Allow internal traffic for cluster {{ aws_ec2_autoscale_cluster.name }}" - rules: - - proto: all - group_name: "{{ aws_ec2_autoscale_cluster.name }}" - rule_desc: "Allow internal traffic for cluster {{ aws_ec2_autoscale_cluster.name }}" + ansible.builtin.include_role: + name: aws/aws_vpc + tasks_from: security_group + vars: + aws_vpc: + name: "{{ aws_ec2_autoscale_cluster.name }}" + profile: "{{ aws_ec2_autoscale_cluster.aws_profile }}" + region: "{{ aws_ec2_autoscale_cluster.region }}" + tags: "{{ aws_ec2_autoscale_cluster.tags | combine({'Name': aws_ec2_autoscale_cluster.name}) }}" + state: "{{ aws_ec2_autoscale_cluster.state }}" + id: "{{ _aws_ec2_autoscale_cluster_vpc_id }}" + description: "Allow internal traffic for cluster {{ aws_ec2_autoscale_cluster.name }}" + rules: + - proto: all + group_name: "{{ aws_ec2_autoscale_cluster.name }}" rules_egress: - proto: all group_name: "{{ aws_ec2_autoscale_cluster.name }}" rule_desc: "Allow internal traffic for cluster {{ aws_ec2_autoscale_cluster.name }}" - register: _aws_ec2_autoscale_cluster_security_group + +- name: Set 
_aws_ec2_autoscale_cluster_security_group variable. + ansible.builtin.set_fact: + _aws_ec2_autoscale_cluster_security_group: "{{ aws_vpc._result[aws_ec2_autoscale_cluster.name] }}" - name: Reset subnets lists. ansible.builtin.set_fact: diff --git a/roles/aws/aws_lambda/tasks/handle_single.yml b/roles/aws/aws_lambda/tasks/handle_single.yml new file mode 100644 index 000000000..45afa3fab --- /dev/null +++ b/roles/aws/aws_lambda/tasks/handle_single.yml @@ -0,0 +1,15 @@ +- name: Check and clean previous Lambda function. + ansible.builtin.file: + path: "{{ _ce_provision_build_dir }}/{{ aws_lambda.name }}.py" + state: absent + +- name: Write Lambda function. + ansible.builtin.copy: + content: "{{ aws_lambda.function_file }}" + dest: "{{ _ce_provision_build_dir }}/{{ aws_lambda.name }}.py" + +- name: Create a zip archive of Lambda function. + community.general.archive: + path: "{{ _ce_provision_build_dir }}/{{ aws_lambda.name }}.py" + dest: "{{ _ce_provision_build_dir }}/{{ aws_lambda.name }}.zip" + format: zip diff --git a/roles/aws/aws_lambda/tasks/handle_url.yml b/roles/aws/aws_lambda/tasks/handle_url.yml new file mode 100644 index 000000000..fa5ca0202 --- /dev/null +++ b/roles/aws/aws_lambda/tasks/handle_url.yml @@ -0,0 +1,33 @@ +- name: Clone git repo. + ansible.builtin.git: + repo: "{{ aws_lambda.function_file }}" + dest: /tmp/funct + +- name: Find all .j2 template files. + ansible.builtin.find: + paths: /tmp/funct + patterns: "*.j2" + recurse: true + register: _j2_files + +- name: Template all .j2 files. + ansible.builtin.template: + src: "{{ item.path }}" + dest: "{{ item.path | regex_replace('\\.j2$', '') }}" + loop: "{{ _j2_files.files }}" + loop_control: + label: "{{ item.path }}" + +- name: Remove original .j2 files after templating. + ansible.builtin.file: + path: "{{ item.path }}" + state: absent + loop: "{{ _j2_files.files }}" + loop_control: + label: "{{ item.path }}" + +- name: Copy a zip archive of Lambda function. 
+ community.general.archive: + path: "/tmp/funct" + dest: "{{ _ce_provision_build_dir }}/{{ aws_lambda.name }}.zip" + format: zip diff --git a/roles/aws/aws_lambda/tasks/handle_zip.yml b/roles/aws/aws_lambda/tasks/handle_zip.yml new file mode 100644 index 000000000..47be33206 --- /dev/null +++ b/roles/aws/aws_lambda/tasks/handle_zip.yml @@ -0,0 +1,7 @@ +- name: Copy a zip archive of Lambda function. + ansible.builtin.copy: + src: "{{ aws_lambda.function_file }}" + dest: "{{ _ce_provision_build_dir }}/{{ aws_lambda.name }}.zip" + owner: controller + group: controller + mode: '0644' diff --git a/roles/aws/aws_lambda/tasks/main.yml b/roles/aws/aws_lambda/tasks/main.yml index 0ad4d4876..d0b5d6e8f 100644 --- a/roles/aws/aws_lambda/tasks/main.yml +++ b/roles/aws/aws_lambda/tasks/main.yml @@ -22,16 +22,17 @@ vars: input_string: "{{ aws_lambda.function_file }}" -- name: Write Lambda function. - ansible.builtin.copy: - content: "{{ aws_lambda.function_file }}" - dest: "{{ _ce_provision_build_dir }}/{{ aws_lambda.name }}.py" +- name: Handle single file. + ansible.builtin.include_tasks: handle_single.yml + when: _string_type == 'single' -- name: Create a zip archive of Lambda function. - community.general.archive: - path: "{{ _ce_provision_build_dir }}/{{ aws_lambda.name }}.py" - dest: "{{ _ce_provision_build_dir }}/{{ aws_lambda.name }}.zip" - format: zip +- name: Handle zip file. + ansible.builtin.include_tasks: handle_zip.yml + when: _string_type == 'zip' + +- name: Handle url. + ansible.builtin.include_tasks: handle_url.yml + when: _string_type == 'url' - name: Place Lambda function in S3 bucket. amazon.aws.s3_object: diff --git a/roles/aws/aws_vpc/tasks/main.yml b/roles/aws/aws_vpc/tasks/main.yml index 9901abd85..ad461150c 100644 --- a/roles/aws/aws_vpc/tasks/main.yml +++ b/roles/aws/aws_vpc/tasks/main.yml @@ -10,22 +10,19 @@ register: _aws_vpc_vpc - name: Ensure default Security group is tagged. 
- amazon.aws.ec2_security_group: - name: "default" - profile: "{{ aws_vpc.aws_profile }}" - region: "{{ aws_vpc.region }}" - tags: "{{ aws_vpc.tags }}" - state: "{{ aws_vpc.state }}" - vpc_id: "{{ _aws_vpc_vpc.vpc.id }}" - description: "default VPC security group" - purge_rules: false + ansible.builtin.include_tasks: "security_group.yml" + vars: + aws_vpc: + name: "default" + id: "{{ _aws_vpc_vpc.vpc.id }}" + description: "default VPC security group" + purge_rules: false - name: Create VPC Security groups. ansible.builtin.include_tasks: "security_group.yml" - with_items: "{{ aws_vpc.security_groups }}" + loop: "{{ aws_vpc.security_groups | list }}" loop_control: - loop_var: security_group - label: "{{ security_group.name }}" + loop_var: _sec_group - name: Create IGW. amazon.aws.ec2_vpc_igw: diff --git a/roles/aws/aws_vpc/tasks/security_group.yml b/roles/aws/aws_vpc/tasks/security_group.yml index 4e737b3cd..9af4121dd 100644 --- a/roles/aws/aws_vpc/tasks/security_group.yml +++ b/roles/aws/aws_vpc/tasks/security_group.yml @@ -1,14 +1,25 @@ +- name: Configure vars if looping over list. + ansible.builtin.set_fact: + aws_vpc: + name: "{{ _sec_group.name | default('') }}" + tags: "{{ _aws_vpc_vpc.vpc.tags | combine({'Name': _sec_group.name}) }}" + id: "{{ _aws_vpc_vpc.vpc.id }}" + description: "{{ _sec_group.description }}" + rules: "{{ _sec_group.rules | default(omit) }}" + rules_egress: "{{ _sec_group.rules_egress | default(omit) }}" + when: _sec_group is defined + - name: Create Security Group. 
amazon.aws.ec2_security_group: - name: "{{ security_group.name }}" + name: "{{ aws_vpc.name }}" profile: "{{ aws_vpc.aws_profile }}" region: "{{ aws_vpc.region }}" - tags: "{{ aws_vpc.tags | combine({'Name': security_group.name}) }}" + tags: "{{ aws_vpc.tags }}" state: "{{ aws_vpc.state }}" - vpc_id: "{{ _aws_vpc_vpc.vpc.id }}" - description: "{{ security_group.description | default('') }}" - rules: "{{ security_group.rules | default(omit) }}" - rules_egress: "{{ security_group.rules_egress | default(omit) }}" + vpc_id: "{{ aws_vpc.id }}" + description: "{{ aws_vpc.description | default('') }}" + rules: "{{ aws_vpc.rules | default(omit) }}" + rules_egress: "{{ aws_vpc.rules_egress | default(omit) }}" purge_rules: "{{ aws_vpc.purge_rules | default(omit) }}" register: _aws_vpc_result diff --git a/roles/aws/aws_vpc_subnet/tasks/subnet.yml b/roles/aws/aws_vpc_subnet/tasks/subnet.yml index 3d89ccc5e..52ceaa6af 100644 --- a/roles/aws/aws_vpc_subnet/tasks/subnet.yml +++ b/roles/aws/aws_vpc_subnet/tasks/subnet.yml @@ -23,18 +23,21 @@ when: subnet.nat_ipv4 is defined and subnet.nat_ipv4 - name: Create matching Security Group. 
- amazon.aws.ec2_security_group: - name: "{{ subnet.name }}" - profile: "{{ aws_vpc_subnet.aws_profile }}" - region: "{{ aws_vpc_subnet.region }}" - tags: "{{ aws_vpc_subnet.tags | combine({'Name': subnet.name}) }}" - state: "{{ aws_vpc_subnet.state }}" - vpc_id: "{{ _aws_vpc_subnet_vpc_id }}" - description: "Allow internal traffic for subnet {{ subnet.name }}" - rules: - - proto: all - group_name: "{{ subnet.name }}" - rule_desc: "Allow internal traffic for subnet {{ subnet.name }}" + ansible.builtin.include_role: + name: aws/aws_vpc + tasks_from: security_group + vars: + aws_vpc: + name: "{{ subnet.name }}" + profile: "{{ aws_vpc_subnet.aws_profile }}" + region: "{{ aws_vpc_subnet.region }}" + tags: "{{ aws_vpc_subnet.tags | combine({'Name': subnet.name}) }}" + state: "{{ aws_vpc_subnet.state }}" + id: "{{ _aws_vpc_subnet_vpc_id }}" + description: "Allow internal traffic for subnet {{ subnet.name }}" + rules: + - proto: all + group_name: "{{ subnet.name }}" rules_egress: - proto: all group_name: "{{ subnet.name }}" From 3bcee174aa4a8be678c2362a1d2fe847740e0ccf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Matej=20=C5=A0tajduhar?= <30931414+matej5@users.noreply.github.com> Date: Thu, 18 Sep 2025 17:42:40 +0200 Subject: [PATCH 24/26] Fixing-copy-AMI-to-backup-region (#2684) Co-authored-by: Matej Stajduhar --- roles/aws/aws_ec2_autoscale_cluster/tasks/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/roles/aws/aws_ec2_autoscale_cluster/tasks/main.yml b/roles/aws/aws_ec2_autoscale_cluster/tasks/main.yml index 9ea852e3c..3dcf5766b 100644 --- a/roles/aws/aws_ec2_autoscale_cluster/tasks/main.yml +++ b/roles/aws/aws_ec2_autoscale_cluster/tasks/main.yml @@ -784,4 +784,4 @@ aws_profile: "{{ aws_ec2_autoscale_cluster.aws_profile }}" source_region: "{{ aws_ec2_autoscale_cluster.region }}" region: "{{ aws_backup.copy_vault.region }}" - source_image_id: "{{ aws_ec2_autoscale_cluster_image_latest }}" + source_image_id: "{{ 
aws_ec2_autoscale_cluster_image_latest.image_id }}" From 7bd773f756cf15d2fbf9e85b7ba2694d20915573 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Matej=20=C5=A0tajduhar?= <30931414+matej5@users.noreply.github.com> Date: Fri, 19 Sep 2025 11:04:17 +0200 Subject: [PATCH 25/26] Fixing-ami-copy-task (#2686) Co-authored-by: Matej Stajduhar --- roles/aws/aws_ec2_autoscale_cluster/tasks/main.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/roles/aws/aws_ec2_autoscale_cluster/tasks/main.yml b/roles/aws/aws_ec2_autoscale_cluster/tasks/main.yml index 3dcf5766b..db6eea26f 100644 --- a/roles/aws/aws_ec2_autoscale_cluster/tasks/main.yml +++ b/roles/aws/aws_ec2_autoscale_cluster/tasks/main.yml @@ -779,9 +779,9 @@ - aws_ec2_autoscale_cluster.route_53.zone is defined - aws_ec2_autoscale_cluster.route_53.zone | length > 0 -- name: Copy AMI to backup region. - community.aws.ec2_ami_copy: - aws_profile: "{{ aws_ec2_autoscale_cluster.aws_profile }}" - source_region: "{{ aws_ec2_autoscale_cluster.region }}" - region: "{{ aws_backup.copy_vault.region }}" - source_image_id: "{{ aws_ec2_autoscale_cluster_image_latest.image_id }}" +#- name: Copy AMI to backup region. +# community.aws.ec2_ami_copy: +# aws_profile: "{{ aws_ec2_autoscale_cluster.aws_profile }}" +# source_region: "{{ aws_ec2_autoscale_cluster.region }}" +# region: "{{ aws_backup.copy_vault.region }}" +# source_image_id: "{{ aws_ec2_autoscale_cluster_image_latest.image_id }}" From 9472416296a10db67de2321dc1092ca5f55a991a Mon Sep 17 00:00:00 2001 From: Greg Harvey Date: Mon, 22 Sep 2025 13:27:41 +0200 Subject: [PATCH 26/26] Updating clamav command to use flock avoiding duplicate processes running. 
--- roles/debian/clamav/defaults/main.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/roles/debian/clamav/defaults/main.yml b/roles/debian/clamav/defaults/main.yml index 4706b665f..de2ee3029 100644 --- a/roles/debian/clamav/defaults/main.yml +++ b/roles/debian/clamav/defaults/main.yml @@ -12,12 +12,12 @@ clamav: # scheduled scans, set to an empty list for no timers timers: - clamscan_daily: - timer_command: /usr/local/clamav/script/clamscan_daily # path to clamscan wrapper script, ensure it is defined in clamav.scripts - timer_OnCalendar: "*-*-* 02:30:00" # see systemd.time documentation - https://www.freedesktop.org/software/systemd/man/latest/systemd.time.html#Calendar%20Events - server_name: "{{ inventory_hostname }}" # for identification via email, defaults to Ansible inventory name. + timer_command: /usr/bin/flock -n /var/run/clamscan.lock -c /usr/local/clamav/script/clamscan_daily # command to run clamscan wrapper script, ensure script location is defined in clamav.scripts + timer_OnCalendar: "*-*-* 02:30:00" # see systemd.time documentation - https://www.freedesktop.org/software/systemd/man/latest/systemd.time.html#Calendar%20Events + server_name: "{{ inventory_hostname }}" # for identification via email, defaults to Ansible inventory name. log_location: /var/log/clamav - send_mail: false # Important - will not send any emails by default. - send_on_fail: true # Only sends emails on scan failure, will not email for successful scans. + send_mail: false # Important - will not send any emails by default. + send_on_fail: true # Only sends emails on scan failure, will not email for successful scans. report_recipient_email: mail@example.com report_sender_email: admin@server.example.com - install_clamdscan: false # flag to install additional 'clamdscan' package + install_clamdscan: false # flag to install additional 'clamdscan' package