diff --git a/plugins/module_utils/disk.py b/plugins/module_utils/disk.py index bd871ad3..29f36883 100644 --- a/plugins/module_utils/disk.py +++ b/plugins/module_utils/disk.py @@ -198,9 +198,10 @@ def needs_reboot(self, action: str, desired_disk=None) -> bool: # Delete and change type. if desired_disk and action == "update" and self.type != desired_disk.type: return True - if ( - action == "delete" and self.type == "ide_cdrom" - ): # ide_cdrom can never be deleted when VM is running. + if action == "delete" and self.type == "ide_cdrom": + # ide_cdrom can never be deleted when VM is running. + # Also other disk types cannot be deleted when VM is running + # if HyperCore thinks disk is being "used". return True return False diff --git a/plugins/module_utils/errors.py b/plugins/module_utils/errors.py index b62c7edf..da731bdd 100644 --- a/plugins/module_utils/errors.py +++ b/plugins/module_utils/errors.py @@ -7,7 +7,8 @@ __metaclass__ = type -from typing import Union +import json +from typing import Union, Dict, Any from ansible.module_utils.urls import Request @@ -111,3 +112,14 @@ class ScaleTimeoutError(ScaleComputingError): def __init__(self, data: Union[str, Exception]): self.message = f"Request timed out: {data}." super(ScaleTimeoutError, self).__init__(self.message) + + +class TaskTagError(ScaleComputingError): + def __init__(self, task_status: Dict[Any, Any]): + # task_status is dict returned by GET /rest/v1/TaskTag + message = "There was a problem during this task execution." 
+ message += f" Task details: {json.dumps(task_status)}" + self.message = message + self.task_status_state = task_status["state"] + self.task_status = task_status + super().__init__(self.message) diff --git a/plugins/module_utils/task_tag.py b/plugins/module_utils/task_tag.py index 09872c8a..6075758e 100644 --- a/plugins/module_utils/task_tag.py +++ b/plugins/module_utils/task_tag.py @@ -9,7 +9,6 @@ __metaclass__ = type -import json from time import sleep from ..module_utils import errors @@ -47,9 +46,7 @@ def wait_task( "ERROR", "UNINITIALIZED", ): # TaskTag has finished unsucessfully or was never initialized, both are errors. - msg = "There was a problem during this task execution." - msg += f" Task details: {json.dumps(task_status)}" - raise errors.ScaleComputingError(msg) + raise errors.TaskTagError(task_status) if task_status.get("state", "") not in ( "RUNNING", "QUEUED", diff --git a/plugins/module_utils/vm.py b/plugins/module_utils/vm.py index 3af9a5b0..002dbf6f 100644 --- a/plugins/module_utils/vm.py +++ b/plugins/module_utils/vm.py @@ -1091,6 +1091,10 @@ def _delete_not_used_disks(cls, module, rest_client, vm, changed, disk_key): ): to_delete = False if to_delete: + # HyperCore is sometimes able to delete disk on running VM, + # but sometimes we need to shutdown VM to remove disk. + # It is hard to know in advance if shutdown is required. + # We try to remove disk without shutdown, if delete fails, we shutdown VM and try again. if existing_disk.needs_reboot("delete"): vm.do_shutdown_steps(module, rest_client) task_tag = rest_client.delete_record( @@ -1099,10 +1103,51 @@ def _delete_not_used_disks(cls, module, rest_client, vm, changed, disk_key): ), module.check_mode, ) - TaskTag.wait_task(rest_client, task_tag, module.check_mode) + try: + TaskTag.wait_task(rest_client, task_tag, module.check_mode) + except errors.TaskTagError as ex: + # Delete failed, maybe because VM was running and disk was in use. + # If VM is running, shutdown VM and retry delete. 
+ if ex.task_status_state != "ERROR": + raise + if not cls._disk_remove_failed_because_vm_running(ex.task_status): + raise + vm_fresh_data = rest_client.get_record( + f"/rest/v1/VirDomain/{vm.uuid}", must_exist=True + ) + if vm_fresh_data["state"] != "RUNNING": + raise + # shutdown and retry remove + vm.do_shutdown_steps(module, rest_client) + task_tag = rest_client.delete_record( + "{0}/{1}".format( + "/rest/v1/VirDomainBlockDevice", existing_disk.uuid + ), + module.check_mode, + ) + TaskTag.wait_task(rest_client, task_tag, module.check_mode) changed = True return changed + @staticmethod + def _disk_remove_failed_because_vm_running(task_status: Dict): + # Look at task_tag dict returned by HyperCore to decide if disk remove failed + # because VM is running, and VM shutdown will allow us to remove the disk. + # What we search for in formattedMessage is HyperCore version dependent: + # 9.2.17 - "Unable to delete block device from VM '%@': Still in use" + # 9.1.14 - "Virt Exception, code: 84, domain 10: Operation not supported: This type of disk cannot be hot unplugged" + + if ( + task_status["formattedMessage"] + == "Unable to delete block device from VM '%@': Still in use" + ): + return True + if task_status["formattedMessage"].endswith( + "Operation not supported: This type of disk cannot be hot unplugged" + ): + return True + return False + @staticmethod def _force_remove_all_disks(module, rest_client, vm, disks_before): # It's important to check if items is equal to empty list and empty list only (no None-s) @@ -1205,6 +1250,7 @@ def ensure_present_or_set(cls, module, rest_client, module_path): changed = cls._delete_not_used_disks( module, rest_client, vm_before, changed, disk_key ) + vm_before.vm_power_up(module, rest_client) if called_from_vm_disk: vm_after, disks_after = cls.get_vm_by_name(module, rest_client) return ( diff --git a/plugins/modules/vm.py b/plugins/modules/vm.py index c0990681..28783cdf 100644 --- a/plugins/modules/vm.py +++ 
b/plugins/modules/vm.py @@ -18,6 +18,13 @@ description: - Use this module to create, update or delete a VM. When creating or updating a VM, setting the disks, network nics and boot order is possible. + + - Module tries to remove disks from a running VM. + If disk cannot be removed from running VM, + then VM will be shutdown, disk will be removed, and VM is started back. + - VM has C(shutdown_timeout) time to respond to shutdown request. + If VM is not shutoff within I(shutdown_timeout), + then a force shutdown will be issued if C(force_reboot=True). version_added: 1.0.0 extends_documentation_fragment: - scale_computing.hypercore.cluster_instance diff --git a/plugins/modules/vm_disk.py b/plugins/modules/vm_disk.py index c48306ff..3679a96a 100644 --- a/plugins/modules/vm_disk.py +++ b/plugins/modules/vm_disk.py @@ -15,21 +15,28 @@ - Tjaž Eržen (@tjazsch) short_description: Manage VM's disks description: - Use this module to add, delete or set disks to the VM. - The module can also remove all disks from a VM, - attach and/or detach ISO image to the VM by ISO's name, - detach ISO image from the VM by disk's disk slot, - or update the existing disks (disk size etc.). - - For a given VM, a particular disk is selected by combination of (I(type), I(disk_slot)). - I(disk_slot) means slot on bus (IDE, virtio or SCSI bus). - - Changing disk I(type) can change its I(disk_slot). - For example, VM has one IDE CD-ROM and one virtio_disk. - The disk will have C(type=virtio_disk) and C(disk_slot=0), - and CD-ROM will have C(type=ide_cdrom) and C(disk_slot=0). - Changing disk I(type) to C(ide_disk) will as place disk on IDE bus, - after the CD-ROM, and disk will get C(disk_slot=1). + - Use this module to add, delete or set disks to the VM. + The module can also remove all disks from a VM, + attach and/or detach ISO image to the VM by ISO's name, + detach ISO image from the VM by disk's disk slot, + or update the existing disks (disk size etc.). 
+ + - For a given VM, a particular disk is selected by combination of (I(type), I(disk_slot)). + I(disk_slot) means slot on bus (IDE, virtio or SCSI bus). + + - Changing disk I(type) can change its I(disk_slot). + For example, VM has one IDE CD-ROM and one virtio_disk. + The disk will have C(type=virtio_disk) and C(disk_slot=0), + and CD-ROM will have C(type=ide_cdrom) and C(disk_slot=0). + Changing disk I(type) to C(ide_disk) will also place disk on IDE bus, + after the CD-ROM, and disk will get C(disk_slot=1). + + - Module tries to remove disks from a running VM. + If disk cannot be removed from running VM, + then VM will be shutdown, disk will be removed, and VM is started back. + - VM has C(shutdown_timeout) time to respond to shutdown request. + If VM is not shutoff within I(shutdown_timeout), + then a force shutdown will be issued if C(force_reboot=True). version_added: 1.0.0 extends_documentation_fragment: @@ -325,6 +332,7 @@ def ensure_absent(module, rest_client): ) TaskTag.wait_task(rest_client, task_tag, module.check_mode) changed = True + vm_before.vm_power_up(module, rest_client) vm_after, disks_after = ManageVMDisks.get_vm_by_name(module, rest_client) return ( changed, @@ -337,15 +345,12 @@ def run(module, rest_client): # ensure_absent is located in modules/vm_disk.py, since it's only used here # ensure_present_or_set is located in module_utils/vm.py, since it's also used in module vm. 
- vm, disks = ManageVMDisks.get_vm_by_name(module, rest_client) if module.params["state"] == "absent": changed, records, diff, reboot = ensure_absent(module, rest_client) else: changed, records, diff, reboot = ManageVMDisks.ensure_present_or_set( module, rest_client, MODULE_PATH ) - if vm: - vm.vm_power_up(module, rest_client) return changed, records, diff, reboot diff --git a/tests/integration/targets/vm__remove_disk/tasks/01_remove_disk_stopped.yml b/tests/integration/targets/vm__remove_disk/tasks/01_remove_disk_stopped.yml new file mode 100644 index 00000000..cefd0252 --- /dev/null +++ b/tests/integration/targets/vm__remove_disk/tasks/01_remove_disk_stopped.yml @@ -0,0 +1,102 @@ +--- +# ------------------------------------------------------------------------------------------------------------------ +# Cleanup + +- name: Delete the VMs, if they exist from before + scale_computing.hypercore.vm: + vm_name: "{{ item }}" + state: absent + loop: "{{ vm_names_all }}" + +# ------------------------------------------------------------------------------------------------------------------ +# Prepare +- name: Dummy task to store VM definition {{ vm_name_a }} + scale_computing.hypercore.vm: &vm_a_definition + vm_name: "{{ vm_name_a }}" + state: present + description: VM remove disk CI test + tags: + - Xlab + memory: "{{ '512 MB' | human_to_bytes }}" + vcpu: 1 + attach_guest_tools_iso: false + power_state: stop + nics: [] + boot_devices: [] + when: False + +# Create VM a +- name: Create the VM {{ vm_name_a }} + scale_computing.hypercore.vm: + <<: *vm_a_definition + disks: + - type: ide_disk + disk_slot: 0 + size: "{{ '10 GB' | human_to_bytes }}" + - type: ide_disk + disk_slot: 1 + size: "{{ '11 GB' | human_to_bytes }}" + register: vm_result +- name: Get info about VM {{ vm_name_a }} + scale_computing.hypercore.vm_info: + vm_name: "{{ vm_name_a }}" + register: vm_info_a_initial_result +- ansible.builtin.assert: + that: + - vm_result is changed + - 
vm_result.record.0.description == "VM remove disk CI test" + - vm_result.record.0.vm_name == "{{ vm_name_a }}" + - vm_result.record.0.disks | length == 2 + - vm_result.vm_rebooted == False + - vm_info_a_initial_result.records.0.description == "VM remove disk CI test" + - vm_info_a_initial_result.records.0.vm_name == "{{ vm_name_a }}" + - vm_info_a_initial_result.records.0.power_state == "stopped" + - vm_info_a_initial_result.records.0.disks | length == 2 + +# ------------------------------------------------------------------------------------------------------------------ +# Remove disk from stopped VM +# Shutdown is not allowed, and also not needed. +- name: Remove disk from stopped VM + block: &remove_disk_from_vm + - name: Remove disk from stopped VM {{ vm_name_a }} + scale_computing.hypercore.vm: + <<: *vm_a_definition + disks: + # remove 1st disk, keep 2nd disk + - type: ide_disk + disk_slot: 1 + size: "{{ '11 GB' | human_to_bytes }}" + force_reboot: True + shutdown_timeout: "{{ shutdown_timeout }}" + register: vm_result + - name: Get info about VM {{ vm_name_a }} + scale_computing.hypercore.vm_info: + vm_name: "{{ vm_name_a }}" + register: vm_info_a_result + - ansible.builtin.assert: + that: + - vm_result is succeeded + - vm_result.vm_rebooted == False + - vm_result.record.0.power_state == "stopped" + - vm_result.record.0.disks | length == 1 + - vm_info_a_result.records.0.power_state == "stopped" + - vm_info_a_result.records.0.disks | length == 1 + - vm_info_a_result.records.0.disks.0.uuid == vm_info_a_initial_result.records.0.disks.1.uuid +- ansible.builtin.assert: + that: + - vm_result is changed + +- name: Remove disk from stopped VM - idempotence + block: + *remove_disk_from_vm +- ansible.builtin.assert: + that: + - vm_result is not changed + + +# ----------------------------------Cleanup-------------------------------------------------------------------------------- +- name: Delete the VMs + scale_computing.hypercore.vm: + vm_name: "{{ item }}" + state: 
absent + loop: "{{ vm_names_all }}" diff --git a/tests/integration/targets/vm__remove_disk/tasks/02_remove_disk_running.yml b/tests/integration/targets/vm__remove_disk/tasks/02_remove_disk_running.yml new file mode 100644 index 00000000..4250ac50 --- /dev/null +++ b/tests/integration/targets/vm__remove_disk/tasks/02_remove_disk_running.yml @@ -0,0 +1,95 @@ +--- +# ------------------------------------------------------------------------------------------------------------------ +# Cleanup + +- name: Delete the VMs, if they exist from before + scale_computing.hypercore.vm: + vm_name: "{{ item }}" + state: absent + loop: "{{ vm_names_all }}" + +# ------------------------------------------------------------------------------------------------------------------ +# Prepare +- name: Dummy task to store VM definition {{ vm_name_a }} + scale_computing.hypercore.vm: &vm_a_definition + vm_name: "{{ vm_name_a }}" + state: present + description: VM remove disk CI test + tags: + - Xlab + memory: "{{ '512 MB' | human_to_bytes }}" + vcpu: 1 + attach_guest_tools_iso: false + power_state: start + nics: [] + boot_devices: [] + when: False + +# Create VM a +- name: Create the VM {{ vm_name_a }} + scale_computing.hypercore.vm: + <<: *vm_a_definition + disks: + - type: ide_disk + disk_slot: 0 + size: "{{ '10 GB' | human_to_bytes }}" + - type: ide_disk + disk_slot: 1 + size: "{{ '11 GB' | human_to_bytes }}" + register: vm_result +- name: Get info about VM {{ vm_name_a }} + scale_computing.hypercore.vm_info: + vm_name: "{{ vm_name_a }}" + register: vm_info_a_initial_result +- ansible.builtin.assert: + that: + - vm_result is changed + - vm_result.record.0.description == "VM remove disk CI test" + - vm_result.record.0.vm_name == "{{ vm_name_a }}" + - vm_result.record.0.disks | length == 2 + - vm_result.vm_rebooted == False + - vm_info_a_initial_result.records.0.description == "VM remove disk CI test" + - vm_info_a_initial_result.records.0.vm_name == "{{ vm_name_a }}" + - 
vm_info_a_initial_result.records.0.power_state == "started" + - vm_info_a_initial_result.records.0.disks | length == 2 + +# ------------------------------------------------------------------------------------------------------------------ +# Remove disk from running VM +# Shutdown is not allowed, and module will fail. +- name: Remove disk from running VM + block: &remove_disk_from_vm + - name: Remove disk from running VM {{ vm_name_a }} + scale_computing.hypercore.vm: + <<: *vm_a_definition + disks: + # remove 1st disk, keep 2nd disk + - type: ide_disk + disk_slot: 1 + size: "{{ '11 GB' | human_to_bytes }}" + force_reboot: False + shutdown_timeout: "{{ shutdown_timeout }}" + register: vm_result + ignore_errors: True + - name: Get info about VM {{ vm_name_a }} + scale_computing.hypercore.vm_info: + vm_name: "{{ vm_name_a }}" + register: vm_info_a_result + - ansible.builtin.assert: + that: + - vm_result is failed + - vm_result is not changed + - vm_info_a_result.records.0.power_state == "started" + - vm_info_a_result.records.0.disks | length == 2 + - vm_info_a_result.records.0.disks.0.uuid == vm_info_a_initial_result.records.0.disks.0.uuid + - vm_info_a_result.records.0.disks.1.uuid == vm_info_a_initial_result.records.0.disks.1.uuid + +- name: Remove disk from running VM - idempotence + block: + *remove_disk_from_vm + +# ----------------------------------Cleanup-------------------------------------------------------------------------------- +- name: Delete the VMs + scale_computing.hypercore.vm: + vm_name: "{{ item }}" + state: absent + loop: "{{ vm_names_all }}" diff --git a/tests/integration/targets/vm__remove_disk/tasks/03_remove_disk_running_with_reboot.yml b/tests/integration/targets/vm__remove_disk/tasks/03_remove_disk_running_with_reboot.yml new file mode 100644 index 00000000..24dbee9f --- /dev/null +++ b/tests/integration/targets/vm__remove_disk/tasks/03_remove_disk_running_with_reboot.yml @@ -0,0 +1,105 @@ +--- +# 
------------------------------------------------------------------------------------------------------------------ +# Cleanup + +- name: Delete the VMs, if they exist from before + scale_computing.hypercore.vm: + vm_name: "{{ item }}" + state: absent + loop: "{{ vm_names_all }}" + +# ------------------------------------------------------------------------------------------------------------------ +# Prepare +- name: Dummy task to store VM definition {{ vm_name_a }} + scale_computing.hypercore.vm: &vm_a_definition + vm_name: "{{ vm_name_a }}" + state: present + description: VM remove disk CI test + tags: + - Xlab + memory: "{{ '512 MB' | human_to_bytes }}" + vcpu: 1 + attach_guest_tools_iso: false + power_state: start + nics: [] + boot_devices: [] + when: False + +# Create VM a +- name: Create the VM {{ vm_name_a }} + scale_computing.hypercore.vm: + <<: *vm_a_definition + disks: + - type: "{{ disk_type }}" + disk_slot: 0 + size: "{{ '10 GB' | human_to_bytes }}" + - type: "{{ disk_type }}" + disk_slot: 1 + size: "{{ '11 GB' | human_to_bytes }}" + register: vm_result +- name: Get info about VM {{ vm_name_a }} + scale_computing.hypercore.vm_info: + vm_name: "{{ vm_name_a }}" + register: vm_info_a_initial_result +- ansible.builtin.assert: + that: + - vm_result is changed + - vm_result.record.0.description == "VM remove disk CI test" + - vm_result.record.0.vm_name == "{{ vm_name_a }}" + - vm_result.record.0.disks | length == 2 + - vm_result.record.0.disks.0.type == disk_type + - vm_result.record.0.disks.1.type == disk_type + - vm_result.vm_rebooted == False + - vm_info_a_initial_result.records.0.description == "VM remove disk CI test" + - vm_info_a_initial_result.records.0.vm_name == "{{ vm_name_a }}" + - vm_info_a_initial_result.records.0.power_state == "started" + - vm_info_a_initial_result.records.0.disks | length == 2 + +# ------------------------------------------------------------------------------------------------------------------ +# Remove disk from running VM 
+# Force shutdown is allowed, and for disk_type: +# - ide_disk: force shutdown is needed to remove disk. +# - virtio_disk: in some HC3 versions disk can be removed from a running VM. +- name: Remove disk from running VM with reboot + block: &remove_disk_from_vm + - name: Remove disk from running VM {{ vm_name_a }} + scale_computing.hypercore.vm: + <<: *vm_a_definition + disks: + # remove 1st disk, keep 2nd disk + - type: "{{ disk_type }}" + disk_slot: 1 + size: "{{ '11 GB' | human_to_bytes }}" + force_reboot: True + shutdown_timeout: "{{ shutdown_timeout }}" + register: vm_result + - name: Get info about VM {{ vm_name_a }} + scale_computing.hypercore.vm_info: + vm_name: "{{ vm_name_a }}" + register: vm_info_a_result + - ansible.builtin.assert: + that: + - vm_result is succeeded + - vm_info_a_result.records.0.power_state == "started" + - vm_info_a_result.records.0.disks | length == 1 + - vm_info_a_result.records.0.disks.0.type == disk_type + - vm_info_a_result.records.0.disks.0.uuid == vm_info_a_initial_result.records.0.disks.1.uuid +- ansible.builtin.assert: + that: + - vm_result is changed + - vm_result.vm_rebooted == expected_vm_reboot + +- name: Remove disk from running VM with reboot - idempotence + block: + *remove_disk_from_vm +- ansible.builtin.assert: + that: + - vm_result is not changed + - vm_result.vm_rebooted == False + +# ----------------------------------Cleanup-------------------------------------------------------------------------------- +- name: Delete the VMs + scale_computing.hypercore.vm: + vm_name: "{{ item }}" + state: absent + loop: "{{ vm_names_all }}" diff --git a/tests/integration/targets/vm__remove_disk/tasks/main.yml b/tests/integration/targets/vm__remove_disk/tasks/main.yml new file mode 100644 index 00000000..c17747f3 --- /dev/null +++ b/tests/integration/targets/vm__remove_disk/tasks/main.yml @@ -0,0 +1,40 @@ +--- +# This is a part of the vm module; testing vm disk remove +# disk remove might require VM to be shutdown, or maybe VM 
can remain running. +# Module should shutdown VM only if it is allowed to shutdown (force_reboot=True) and +# if disk cannot be removed without shutdown. + +# Test with IDE disk, remove fails much faster compared to virtio disk (2 vs 60 sec). +# See also https://github.com/ScaleComputing/HyperCoreAnsibleCollection/issues/249 + +- environment: + SC_HOST: "{{ sc_host }}" + SC_USERNAME: "{{ sc_config[sc_host].sc_username }}" + SC_PASSWORD: "{{ sc_config[sc_host].sc_password }}" + SC_TIMEOUT: "{{ sc_timeout }}" + vars: + vm_name_a: "vm--remove-disk--a" + vm_names_all: + - "{{ vm_name_a }}" + shutdown_timeout: 30 + + block: + - include_tasks: 01_remove_disk_stopped.yml + - include_tasks: 02_remove_disk_running.yml + - include_tasks: 03_remove_disk_running_with_reboot.yml + vars: + disk_type: ide_disk + expected_vm_reboot: True + - name: Get HyperCore version + scale_computing.hypercore.cluster_info: + register: cluster_info + - include_tasks: 03_remove_disk_running_with_reboot.yml + vars: + disk_type: virtio_disk + # HyperCore 9.1.14 could remove disk from running VM + # HyperCore 9.2.13, 9.2.17 could not remove disk from running VM + expected_vm_reboot: "{{ cluster_info.record.icos_version.startswith('9.2') }}" + - include_tasks: 03_remove_disk_running_with_reboot.yml + vars: + disk_type: scsi_disk + expected_vm_reboot: False diff --git a/tests/integration/targets/vm_disk__remove_disk/tasks/01_remove_disk_stopped.yml b/tests/integration/targets/vm_disk__remove_disk/tasks/01_remove_disk_stopped.yml new file mode 100644 index 00000000..6508f07c --- /dev/null +++ b/tests/integration/targets/vm_disk__remove_disk/tasks/01_remove_disk_stopped.yml @@ -0,0 +1,96 @@ +--- +# ------------------------------------------------------------------------------------------------------------------ +# Cleanup + +- name: Delete the VMs, if they exist from before + scale_computing.hypercore.vm: + vm_name: "{{ item }}" + state: absent + loop: "{{ vm_names_all }}" + +# 
------------------------------------------------------------------------------------------------------------------ +# Prepare +# Create VM a +- name: Create the VM {{ vm_name_a }} + scale_computing.hypercore.vm: + vm_name: "{{ vm_name_a }}" + state: present + description: VM remove disk CI test + tags: + - Xlab + memory: "{{ '512 MB' | human_to_bytes }}" + vcpu: 1 + attach_guest_tools_iso: false + power_state: stop + nics: [] + boot_devices: [] + disks: + - type: ide_disk + disk_slot: 0 + size: "{{ '10 GB' | human_to_bytes }}" + - type: ide_disk + disk_slot: 1 + size: "{{ '11 GB' | human_to_bytes }}" + register: vm_result +- name: Get info about VM {{ vm_name_a }} + scale_computing.hypercore.vm_info: + vm_name: "{{ vm_name_a }}" + register: vm_info_a_initial_result +- ansible.builtin.assert: + that: + - vm_result is changed + - vm_result.record.0.description == "VM remove disk CI test" + - vm_result.record.0.vm_name == "{{ vm_name_a }}" + - vm_result.record.0.disks | length == 2 + - vm_result.vm_rebooted == False + - vm_info_a_initial_result.records.0.description == "VM remove disk CI test" + - vm_info_a_initial_result.records.0.vm_name == "{{ vm_name_a }}" + - vm_info_a_initial_result.records.0.power_state == "stopped" + - vm_info_a_initial_result.records.0.disks | length == 2 + +# ------------------------------------------------------------------------------------------------------------------ +# Remove disk from stopped VM +# Shutdown is not allowed, and also not needed. 
+- name: Remove disk from stopped VM + block: &remove_disk_from_vm + - name: Remove disk from stopped VM {{ vm_name_a }} + scale_computing.hypercore.vm_disk: + vm_name: "{{ vm_name_a }}" + state: set + items: + # remove 1st disk, keep 2nd disk + - type: ide_disk + disk_slot: 1 + size: "{{ '11 GB' | human_to_bytes }}" + force_reboot: True + shutdown_timeout: "{{ shutdown_timeout }}" + register: vm_disk_result + - name: Get info about VM {{ vm_name_a }} + scale_computing.hypercore.vm_info: + vm_name: "{{ vm_name_a }}" + register: vm_info_a_result + - ansible.builtin.assert: + that: + - vm_disk_result is succeeded + - vm_disk_result.vm_rebooted == False + - vm_info_a_result.records.0.power_state == "stopped" + - vm_info_a_result.records.0.disks | length == 1 + - vm_info_a_result.records.0.disks.0.uuid == vm_info_a_initial_result.records.0.disks.1.uuid +- ansible.builtin.assert: + that: + - vm_disk_result is changed + +- name: Remove disk from stopped VM - idempotence + block: + *remove_disk_from_vm +- ansible.builtin.assert: + that: + - vm_disk_result is not changed + + +# ----------------------------------Cleanup-------------------------------------------------------------------------------- +- name: Delete the VMs + scale_computing.hypercore.vm: + vm_name: "{{ item }}" + state: absent + loop: "{{ vm_names_all }}" diff --git a/tests/integration/targets/vm_disk__remove_disk/tasks/02_remove_disk_running.yml b/tests/integration/targets/vm_disk__remove_disk/tasks/02_remove_disk_running.yml new file mode 100644 index 00000000..ef8a0b88 --- /dev/null +++ b/tests/integration/targets/vm_disk__remove_disk/tasks/02_remove_disk_running.yml @@ -0,0 +1,91 @@ +--- +# ------------------------------------------------------------------------------------------------------------------ +# Cleanup + +- name: Delete the VMs, if they exist from before + scale_computing.hypercore.vm: + vm_name: "{{ item }}" + state: absent + loop: "{{ vm_names_all }}" + +# 
------------------------------------------------------------------------------------------------------------------ +# Prepare +# Create VM a +- name: Create the VM {{ vm_name_a }} + scale_computing.hypercore.vm: + vm_name: "{{ vm_name_a }}" + state: present + description: VM remove disk CI test + tags: + - Xlab + memory: "{{ '512 MB' | human_to_bytes }}" + vcpu: 1 + attach_guest_tools_iso: false + power_state: start + nics: [] + boot_devices: [] + disks: + - type: ide_disk + disk_slot: 0 + size: "{{ '10 GB' | human_to_bytes }}" + - type: ide_disk + disk_slot: 1 + size: "{{ '11 GB' | human_to_bytes }}" + register: vm_result +- name: Get info about VM {{ vm_name_a }} + scale_computing.hypercore.vm_info: + vm_name: "{{ vm_name_a }}" + register: vm_info_a_initial_result +- ansible.builtin.assert: + that: + - vm_result is changed + - vm_result.record.0.description == "VM remove disk CI test" + - vm_result.record.0.vm_name == "{{ vm_name_a }}" + - vm_result.record.0.disks | length == 2 + - vm_result.vm_rebooted == False + - vm_info_a_initial_result.records.0.description == "VM remove disk CI test" + - vm_info_a_initial_result.records.0.vm_name == "{{ vm_name_a }}" + - vm_info_a_initial_result.records.0.power_state == "started" + - vm_info_a_initial_result.records.0.disks | length == 2 + +# ------------------------------------------------------------------------------------------------------------------ +# Remove disk from running VM +# Shutdown is not allowed, and module will fail. 
+- name: Remove disk from running VM + block: &remove_disk_from_vm + - name: Remove disk from running VM {{ vm_name_a }} + scale_computing.hypercore.vm_disk: + vm_name: "{{ vm_name_a }}" + state: set + items: + # remove 1st disk, keep 2nd disk + - type: ide_disk + disk_slot: 1 + size: "{{ '11 GB' | human_to_bytes }}" + force_reboot: False + shutdown_timeout: "{{ shutdown_timeout }}" + register: vm_disk_result + ignore_errors: True + - name: Get info about VM {{ vm_name_a }} + scale_computing.hypercore.vm_info: + vm_name: "{{ vm_name_a }}" + register: vm_info_a_result + - ansible.builtin.assert: + that: + - vm_disk_result is failed + - vm_disk_result is not changed + - vm_info_a_result.records.0.power_state == "started" + - vm_info_a_result.records.0.disks | length == 2 + - vm_info_a_result.records.0.disks.0.uuid == vm_info_a_initial_result.records.0.disks.0.uuid + - vm_info_a_result.records.0.disks.1.uuid == vm_info_a_initial_result.records.0.disks.1.uuid + +- name: Remove disk from running VM - idempotence + block: + *remove_disk_from_vm + +# ----------------------------------Cleanup-------------------------------------------------------------------------------- +- name: Delete the VMs + scale_computing.hypercore.vm: + vm_name: "{{ item }}" + state: absent + loop: "{{ vm_names_all }}" diff --git a/tests/integration/targets/vm_disk__remove_disk/tasks/03_remove_disk_running_with_reboot.yml b/tests/integration/targets/vm_disk__remove_disk/tasks/03_remove_disk_running_with_reboot.yml new file mode 100644 index 00000000..0f89761a --- /dev/null +++ b/tests/integration/targets/vm_disk__remove_disk/tasks/03_remove_disk_running_with_reboot.yml @@ -0,0 +1,96 @@ +--- +# ------------------------------------------------------------------------------------------------------------------ +# Cleanup + +- name: Delete the VMs, if they exist from before + scale_computing.hypercore.vm: + vm_name: "{{ item }}" + state: absent + loop: "{{ vm_names_all }}" + +# 
------------------------------------------------------------------------------------------------------------------ +# Prepare +# Create VM a +- name: Create the VM {{ vm_name_a }} + scale_computing.hypercore.vm: + vm_name: "{{ vm_name_a }}" + state: present + description: VM remove disk CI test + tags: + - Xlab + memory: "{{ '512 MB' | human_to_bytes }}" + vcpu: 1 + attach_guest_tools_iso: false + power_state: start + nics: [] + boot_devices: [] + disks: + - type: ide_disk + disk_slot: 0 + size: "{{ '10 GB' | human_to_bytes }}" + - type: ide_disk + disk_slot: 1 + size: "{{ '11 GB' | human_to_bytes }}" + register: vm_result +- name: Get info about VM {{ vm_name_a }} + scale_computing.hypercore.vm_info: + vm_name: "{{ vm_name_a }}" + register: vm_info_a_initial_result +- ansible.builtin.assert: + that: + - vm_result is changed + - vm_result.record.0.description == "VM remove disk CI test" + - vm_result.record.0.vm_name == "{{ vm_name_a }}" + - vm_result.record.0.disks | length == 2 + - vm_result.vm_rebooted == False + - vm_info_a_initial_result.records.0.description == "VM remove disk CI test" + - vm_info_a_initial_result.records.0.vm_name == "{{ vm_name_a }}" + - vm_info_a_initial_result.records.0.power_state == "started" + - vm_info_a_initial_result.records.0.disks | length == 2 + +# ------------------------------------------------------------------------------------------------------------------ +# Remove disk from running VM +# Shutdown is allowed, and needed. 
+- name: Remove disk from running VM with reboot + block: &remove_disk_from_vm + - name: Remove disk from running VM {{ vm_name_a }} + scale_computing.hypercore.vm_disk: + vm_name: "{{ vm_name_a }}" + state: set + items: + # remove 1st disk, keep 2nd disk + - type: ide_disk + disk_slot: 1 + size: "{{ '11 GB' | human_to_bytes }}" + force_reboot: True + shutdown_timeout: "{{ shutdown_timeout }}" + register: vm_disk_result + - name: Get info about VM {{ vm_name_a }} + scale_computing.hypercore.vm_info: + vm_name: "{{ vm_name_a }}" + register: vm_info_a_result + - ansible.builtin.assert: + that: + - vm_disk_result is succeeded + - vm_info_a_result.records.0.power_state == "started" + - vm_info_a_result.records.0.disks | length == 1 + - vm_info_a_result.records.0.disks.0.uuid == vm_info_a_initial_result.records.0.disks.1.uuid +- ansible.builtin.assert: + that: + - vm_disk_result is changed + - vm_disk_result.vm_rebooted == True + +- name: Remove disk from running VM with reboot - idempotence + block: + *remove_disk_from_vm +- ansible.builtin.assert: + that: + - vm_disk_result is not changed + - vm_disk_result.vm_rebooted == False + +# ----------------------------------Cleanup-------------------------------------------------------------------------------- +- name: Delete the VMs + scale_computing.hypercore.vm: + vm_name: "{{ item }}" + state: absent + loop: "{{ vm_names_all }}" diff --git a/tests/integration/targets/vm_disk__remove_disk/tasks/main.yml b/tests/integration/targets/vm_disk__remove_disk/tasks/main.yml new file mode 100644 index 00000000..fe1460e4 --- /dev/null +++ b/tests/integration/targets/vm_disk__remove_disk/tasks/main.yml @@ -0,0 +1,24 @@ +--- +# This is a part of the vm module; testing vm disk remove +# disk remove might require VM to be shutdown, or maybe VM can remain running. +# Module should shutdown VM only if it is allowed to shutdown (force_reboot=True) and +# if disk cannot be removed without shutdown. 
+ +# Test with IDE disk, remove fails much faster compared to virtio disk (2 vs 60 sec). +# See also https://github.com/ScaleComputing/HyperCoreAnsibleCollection/issues/249 + +- environment: + SC_HOST: "{{ sc_host }}" + SC_USERNAME: "{{ sc_config[sc_host].sc_username }}" + SC_PASSWORD: "{{ sc_config[sc_host].sc_password }}" + SC_TIMEOUT: "{{ sc_timeout }}" + vars: + vm_name_a: "vm-disk--remove-disk--a" + vm_names_all: + - "{{ vm_name_a }}" + shutdown_timeout: 30 + + block: + - include_tasks: 01_remove_disk_stopped.yml + - include_tasks: 02_remove_disk_running.yml + - include_tasks: 03_remove_disk_running_with_reboot.yml