Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

storage: tests_luks.yml partition case failed with nvme disk #141

Closed
yizhanglinux opened this issue Aug 4, 2020 · 1 comment · Fixed by #153
Closed

storage: tests_luks.yml partition case failed with nvme disk #141

yizhanglinux opened this issue Aug 4, 2020 · 1 comment · Fixed by #153
Assignees

Comments

@yizhanglinux
Copy link
Collaborator

This issue can be reproduced with an NVMe disk on RHEL7/8

playbook

# cat tests_luks.yml
---
- hosts: all
  become: true
  vars:
    storage_safe_mode: false
    mount_location: '/opt/test1'
    volume_size: '5g'

  tasks:
    - include_role:
        name: storage

    - include_tasks: get_unused_disk.yml
      vars:
        min_size: "{{ volume_size }}"
        max_return: 1

    ##
    ## Partition
    ##

    - name: Create an encrypted partition volume w/ default fs
      include_role:
        name: storage
      vars:
        storage_pools:
          - name: foo
            type: partition
            disks: "{{ unused_disks }}"
            volumes:
              - name: test1
                type: partition
                mount_point: "{{ mount_location }}"
                #                size: 4g
                encryption: true
                encryption_passphrase: 'yabbadabbadoo'

    - include_tasks: verify-role-results.yml

    - name: Remove the encryption layer
      include_role:
        name: storage
      vars:
        storage_pools:
          - name: foo
            type: partition
            disks: "{{ unused_disks }}"
            volumes:
              - name: test1
                type: partition
                mount_point: "{{ mount_location }}"
                #                size: 4g
                encryption: false
                encryption_passphrase: 'yabbadabbadoo'

    - include_tasks: verify-role-results.yml

Detailed Log

# ansible-playbook -i inventory tests/tests_luks.yml -vvv

[root@storageqe-62 storage]# cat OUTPUT 
ansible-playbook 2.9.11
  config file = /etc/ansible/ansible.cfg
  configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /usr/lib/python3.6/site-packages/ansible
  executable location = /usr/bin/ansible-playbook
  python version = 3.6.8 (default, Jun 26 2020, 12:10:09) [GCC 8.3.1 20191121 (Red Hat 8.3.1-5)]
Using /etc/ansible/ansible.cfg as config file
host_list declined parsing /root/test/storage/inventory as it did not pass its verify_file() method
script declined parsing /root/test/storage/inventory as it did not pass its verify_file() method
auto declined parsing /root/test/storage/inventory as it did not pass its verify_file() method
Parsed /root/test/storage/inventory inventory source with ini plugin

PLAYBOOK: tests_luks.yml ********************************************************************************************************************************************************************************
1 plays in tests/tests_luks.yml

PLAY [all] **********************************************************************************************************************************************************************************************

TASK [Gathering Facts] **********************************************************************************************************************************************************************************
task path: /root/test/storage/tests/tests_luks.yml:2
ESTABLISH LOCAL CONNECTION FOR USER: root
EXEC /bin/sh -c 'echo ~root && sleep 0'
EXEC /bin/sh -c '( umask 77 && mkdir -p "`echo /root/.ansible/tmp`"&& mkdir /root/.ansible/tmp/ansible-tmp-1596551654.528671-15523-246813385857590 && echo ansible-tmp-1596551654.528671-15523-246813385857590="`echo /root/.ansible/tmp/ansible-tmp-1596551654.528671-15523-246813385857590`" ) && sleep 0'
Attempting python interpreter discovery
EXEC /bin/sh -c 'echo PLATFORM; uname; echo FOUND; command -v '"'"'/usr/bin/python'"'"'; command -v '"'"'python3.7'"'"'; command -v '"'"'python3.6'"'"'; command -v '"'"'python3.5'"'"'; command -v '"'"'python2.7'"'"'; command -v '"'"'python2.6'"'"'; command -v '"'"'/usr/libexec/platform-python'"'"'; command -v '"'"'/usr/bin/python3'"'"'; command -v '"'"'python'"'"'; echo ENDFOUND && sleep 0'
EXEC /bin/sh -c '/usr/bin/python3.6 && sleep 0'
Using module file /usr/lib/python3.6/site-packages/ansible/modules/system/setup.py
PUT /root/.ansible/tmp/ansible-local-15515o7pn3a74/tmp4s2clgmr TO /root/.ansible/tmp/ansible-tmp-1596551654.528671-15523-246813385857590/AnsiballZ_setup.py
EXEC /bin/sh -c 'chmod u+x /root/.ansible/tmp/ansible-tmp-1596551654.528671-15523-246813385857590/ /root/.ansible/tmp/ansible-tmp-1596551654.528671-15523-246813385857590/AnsiballZ_setup.py && sleep 0'
EXEC /bin/sh -c '/usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1596551654.528671-15523-246813385857590/AnsiballZ_setup.py && sleep 0'
EXEC /bin/sh -c 'rm -f -r /root/.ansible/tmp/ansible-tmp-1596551654.528671-15523-246813385857590/ > /dev/null 2>&1 && sleep 0'
ok: [localhost]
META: ran handlers

TASK [include_role : storage] ***************************************************************************************************************************************************************************
task path: /root/test/storage/tests/tests_luks.yml:10

TASK [storage : Set version specific variables] *********************************************************************************************************************************************************
task path: /root/test/storage/tasks/main.yml:2
ok: [localhost] => {
"ansible_facts": {
"blivet_package_list": [
"python3-blivet",
"libblockdev-crypto",
"libblockdev-dm",
"libblockdev-lvm",
"libblockdev-mdraid",
"libblockdev-swap"
]
},
"ansible_included_var_files": [
"/root/test/storage/vars/RedHat_8.yml"
],
"changed": false
}

TASK [storage : define an empty list of pools to be used in testing] ************************************************************************************************************************************
task path: /root/test/storage/tasks/main.yml:14
ok: [localhost] => {
"ansible_facts": {
"_storage_pools_list": []
},
"changed": false
}

TASK [storage : define an empty list of volumes to be used in testing] **********************************************************************************************************************************
task path: /root/test/storage/tasks/main.yml:18
ok: [localhost] => {
"ansible_facts": {
"_storage_volumes_list": []
},
"changed": false
}

TASK [storage : include the appropriate provider tasks] *************************************************************************************************************************************************
task path: /root/test/storage/tasks/main.yml:22
included: /root/test/storage/tasks/main-blivet.yml for localhost

TASK [storage : get a list of rpm packages installed on host machine] ***********************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:2
skipping: [localhost] => {
"changed": false,
"skip_reason": "Conditional result was False"
}

TASK [storage : make sure blivet is available] **********************************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:7
ESTABLISH LOCAL CONNECTION FOR USER: root
EXEC /bin/sh -c 'echo ~root && sleep 0'
EXEC /bin/sh -c '( umask 77 && mkdir -p "`echo /root/.ansible/tmp`"&& mkdir /root/.ansible/tmp/ansible-tmp-1596551658.1332204-15631-187703718888323 && echo ansible-tmp-1596551658.1332204-15631-187703718888323="`echo /root/.ansible/tmp/ansible-tmp-1596551658.1332204-15631-187703718888323`" ) && sleep 0'
Using module file /usr/lib/python3.6/site-packages/ansible/modules/packaging/os/dnf.py
PUT /root/.ansible/tmp/ansible-local-15515o7pn3a74/tmpv7a6bt6k TO /root/.ansible/tmp/ansible-tmp-1596551658.1332204-15631-187703718888323/AnsiballZ_dnf.py
EXEC /bin/sh -c 'chmod u+x /root/.ansible/tmp/ansible-tmp-1596551658.1332204-15631-187703718888323/ /root/.ansible/tmp/ansible-tmp-1596551658.1332204-15631-187703718888323/AnsiballZ_dnf.py && sleep 0'
EXEC /bin/sh -c '/usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1596551658.1332204-15631-187703718888323/AnsiballZ_dnf.py && sleep 0'
EXEC /bin/sh -c 'rm -f -r /root/.ansible/tmp/ansible-tmp-1596551658.1332204-15631-187703718888323/ > /dev/null 2>&1 && sleep 0'
changed: [localhost] => {
"changed": true,
"invocation": {
"module_args": {
"allow_downgrade": false,
"autoremove": false,
"bugfix": false,
"conf_file": null,
"disable_excludes": null,
"disable_gpg_check": false,
"disable_plugin": [],
"disablerepo": [],
"download_dir": null,
"download_only": false,
"enable_plugin": [],
"enablerepo": [],
"exclude": [],
"install_repoquery": true,
"install_weak_deps": true,
"installroot": "/",
"list": null,
"lock_timeout": 30,
"name": [
"python3-blivet",
"libblockdev-crypto",
"libblockdev-dm",
"libblockdev-lvm",
"libblockdev-mdraid",
"libblockdev-swap"
],
"releasever": null,
"security": false,
"skip_broken": false,
"state": "present",
"update_cache": false,
"update_only": false,
"validate_certs": true
}
},
"msg": "",
"rc": 0,
"results": [
"Installed: nss-softokn-freebl-3.44.0-15.el8.x86_64",
"Installed: nss-sysinit-3.44.0-15.el8.x86_64",
"Installed: libblockdev-2.24-1.el8.x86_64",
"Installed: nss-util-3.44.0-15.el8.x86_64",
"Installed: libblockdev-crypto-2.24-1.el8.x86_64",
"Installed: daxctl-libs-67-2.el8.x86_64",
"Installed: libblockdev-dm-2.24-1.el8.x86_64",
"Installed: libblockdev-fs-2.24-1.el8.x86_64",
"Installed: libblockdev-kbd-2.24-1.el8.x86_64",
"Installed: gdisk-1.0.3-6.el8.x86_64",
"Installed: libblockdev-loop-2.24-1.el8.x86_64",
"Installed: libblockdev-lvm-2.24-1.el8.x86_64",
"Installed: libblockdev-mdraid-2.24-1.el8.x86_64",
"Installed: libblockdev-mpath-2.24-1.el8.x86_64",
"Installed: libblockdev-nvdimm-2.24-1.el8.x86_64",
"Installed: libblockdev-part-2.24-1.el8.x86_64",
"Installed: device-mapper-multipath-0.8.4-2.el8.x86_64",
"Installed: libblockdev-swap-2.24-1.el8.x86_64",
"Installed: device-mapper-multipath-libs-0.8.4-2.el8.x86_64",
"Installed: libblockdev-utils-2.24-1.el8.x86_64",
"Installed: libbytesize-1.4-3.el8.x86_64",
"Installed: lsof-4.93.2-1.el8.x86_64",
"Installed: mdadm-4.1-14.el8.x86_64",
"Installed: userspace-rcu-0.10.1-2.el8.x86_64",
"Installed: python3-pyparted-1:3.11.0-13.el8.x86_64",
"Installed: nspr-4.21.0-2.el8_0.x86_64",
"Installed: volume_key-libs-0.3.11-5.el8.x86_64",
"Installed: ndctl-67-2.el8.x86_64",
"Installed: blivet-data-1:3.2.2-3.el8.noarch",
"Installed: nss-3.44.0-15.el8.x86_64",
"Installed: ndctl-libs-67-2.el8.x86_64",
"Installed: python3-blivet-1:3.2.2-3.el8.noarch",
"Installed: python3-blockdev-2.24-1.el8.x86_64",
"Installed: python3-bytesize-1.4-3.el8.x86_64",
"Installed: nss-softokn-3.44.0-15.el8.x86_64"
]
}

TASK [storage : initialize internal facts] **************************************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:18
ok: [localhost] => {
"ansible_facts": {
"_storage_pools": [],
"_storage_vol_defaults": [],
"_storage_vol_pools": [],
"_storage_vols_no_defaults": [],
"_storage_vols_no_defaults_by_pool": {},
"_storage_vols_w_defaults": [],
"_storage_volumes": []
},
"changed": false
}

TASK [storage : Apply defaults to pools and volumes [1/6]] **********************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:28

TASK [storage : Apply defaults to pools and volumes [2/6]] **********************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:36

TASK [storage : Apply defaults to pools and volumes [3/6]] **********************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:44

TASK [storage : Apply defaults to pools and volumes [4/6]] **********************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:52

TASK [storage : Apply defaults to pools and volumes [5/6]] **********************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:61

TASK [storage : Apply defaults to pools and volumes [6/6]] **********************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:72

TASK [storage : debug] **********************************************************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:84
ok: [localhost] => {
"_storage_pools": []
}

TASK [storage : debug] **********************************************************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:87
ok: [localhost] => {
"_storage_volumes": []
}

TASK [storage : get required packages] ******************************************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:90
ESTABLISH LOCAL CONNECTION FOR USER: root
EXEC /bin/sh -c 'echo ~root && sleep 0'
EXEC /bin/sh -c '( umask 77 && mkdir -p "`echo /root/.ansible/tmp`"&& mkdir /root/.ansible/tmp/ansible-tmp-1596551675.9051943-16220-178753882587611 && echo ansible-tmp-1596551675.9051943-16220-178753882587611="`echo /root/.ansible/tmp/ansible-tmp-1596551675.9051943-16220-178753882587611`" ) && sleep 0'
Using module file /root/test/storage/library/blivet.py
PUT /root/.ansible/tmp/ansible-local-15515o7pn3a74/tmpbiozo0to TO /root/.ansible/tmp/ansible-tmp-1596551675.9051943-16220-178753882587611/AnsiballZ_blivet.py
EXEC /bin/sh -c 'chmod u+x /root/.ansible/tmp/ansible-tmp-1596551675.9051943-16220-178753882587611/ /root/.ansible/tmp/ansible-tmp-1596551675.9051943-16220-178753882587611/AnsiballZ_blivet.py && sleep 0'
EXEC /bin/sh -c '/usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1596551675.9051943-16220-178753882587611/AnsiballZ_blivet.py && sleep 0'
EXEC /bin/sh -c 'rm -f -r /root/.ansible/tmp/ansible-tmp-1596551675.9051943-16220-178753882587611/ > /dev/null 2>&1 && sleep 0'
ok: [localhost] => {
"actions": [],
"changed": false,
"crypts": [],
"invocation": {
"module_args": {
"disklabel_type": null,
"packages_only": true,
"pools": [],
"safe_mode": true,
"use_partitions": null,
"volumes": []
}
},
"leaves": [],
"mounts": [],
"packages": [],
"pools": [],
"volumes": []
}

TASK [storage : make sure required packages are installed] **********************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:99
ESTABLISH LOCAL CONNECTION FOR USER: root
EXEC /bin/sh -c 'echo ~root && sleep 0'
EXEC /bin/sh -c '( umask 77 && mkdir -p "`echo /root/.ansible/tmp`"&& mkdir /root/.ansible/tmp/ansible-tmp-1596551677.1227002-16238-163363811539987 && echo ansible-tmp-1596551677.1227002-16238-163363811539987="`echo /root/.ansible/tmp/ansible-tmp-1596551677.1227002-16238-163363811539987`" ) && sleep 0'
Using module file /usr/lib/python3.6/site-packages/ansible/modules/packaging/os/dnf.py
PUT /root/.ansible/tmp/ansible-local-15515o7pn3a74/tmph5isg7r4 TO /root/.ansible/tmp/ansible-tmp-1596551677.1227002-16238-163363811539987/AnsiballZ_dnf.py
EXEC /bin/sh -c 'chmod u+x /root/.ansible/tmp/ansible-tmp-1596551677.1227002-16238-163363811539987/ /root/.ansible/tmp/ansible-tmp-1596551677.1227002-16238-163363811539987/AnsiballZ_dnf.py && sleep 0'
EXEC /bin/sh -c '/usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1596551677.1227002-16238-163363811539987/AnsiballZ_dnf.py && sleep 0'
EXEC /bin/sh -c 'rm -f -r /root/.ansible/tmp/ansible-tmp-1596551677.1227002-16238-163363811539987/ > /dev/null 2>&1 && sleep 0'
ok: [localhost] => {
"changed": false,
"invocation": {
"module_args": {
"allow_downgrade": false,
"autoremove": false,
"bugfix": false,
"conf_file": null,
"disable_excludes": null,
"disable_gpg_check": false,
"disable_plugin": [],
"disablerepo": [],
"download_dir": null,
"download_only": false,
"enable_plugin": [],
"enablerepo": [],
"exclude": [],
"install_repoquery": true,
"install_weak_deps": true,
"installroot": "/",
"list": null,
"lock_timeout": 30,
"name": [],
"releasever": null,
"security": false,
"skip_broken": false,
"state": "present",
"update_cache": false,
"update_only": false,
"validate_certs": true
}
},
"msg": "Nothing to do",
"rc": 0,
"results": []
}

TASK [storage : manage the pools and volumes to match the specified state] ******************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:104
ESTABLISH LOCAL CONNECTION FOR USER: root
EXEC /bin/sh -c 'echo ~root && sleep 0'
EXEC /bin/sh -c '( umask 77 && mkdir -p "`echo /root/.ansible/tmp`"&& mkdir /root/.ansible/tmp/ansible-tmp-1596551680.7664928-16254-120747266880603 && echo ansible-tmp-1596551680.7664928-16254-120747266880603="`echo /root/.ansible/tmp/ansible-tmp-1596551680.7664928-16254-120747266880603`" ) && sleep 0'
Using module file /root/test/storage/library/blivet.py
PUT /root/.ansible/tmp/ansible-local-15515o7pn3a74/tmpsx6rcxev TO /root/.ansible/tmp/ansible-tmp-1596551680.7664928-16254-120747266880603/AnsiballZ_blivet.py
EXEC /bin/sh -c 'chmod u+x /root/.ansible/tmp/ansible-tmp-1596551680.7664928-16254-120747266880603/ /root/.ansible/tmp/ansible-tmp-1596551680.7664928-16254-120747266880603/AnsiballZ_blivet.py && sleep 0'
EXEC /bin/sh -c '/usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1596551680.7664928-16254-120747266880603/AnsiballZ_blivet.py && sleep 0'
EXEC /bin/sh -c 'rm -f -r /root/.ansible/tmp/ansible-tmp-1596551680.7664928-16254-120747266880603/ > /dev/null 2>&1 && sleep 0'
ok: [localhost] => {
"actions": [],
"changed": false,
"crypts": [],
"invocation": {
"module_args": {
"disklabel_type": null,
"packages_only": false,
"pools": [],
"safe_mode": false,
"use_partitions": null,
"volumes": []
}
},
"leaves": [],
"mounts": [],
"packages": [],
"pools": [],
"volumes": []
}

TASK [storage : debug] **********************************************************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:113
ok: [localhost] => {
"blivet_output": {
"actions": [],
"changed": false,
"crypts": [],
"failed": false,
"leaves": [],
"mounts": [],
"packages": [],
"pools": [],
"volumes": []
}
}

TASK [storage : set the list of pools for test verification] ********************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:116
ok: [localhost] => {
"ansible_facts": {
"_storage_pools_list": []
},
"changed": false
}

TASK [storage : set the list of volumes for test verification] ******************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:120
ok: [localhost] => {
"ansible_facts": {
"_storage_volumes_list": []
},
"changed": false
}

TASK [storage : remove obsolete mounts] *****************************************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:136

TASK [storage : tell systemd to refresh its view of /etc/fstab] *****************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:147
skipping: [localhost] => {
"changed": false,
"skip_reason": "Conditional result was False"
}

TASK [storage : set up new/current mounts] **************************************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:152

TASK [storage : tell systemd to refresh its view of /etc/fstab] *****************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:163
skipping: [localhost] => {
"changed": false,
"skip_reason": "Conditional result was False"
}

TASK [storage : Manage /etc/crypttab to account for changes we just made] *******************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:171

TASK [storage : Update facts] ***************************************************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:186
ESTABLISH LOCAL CONNECTION FOR USER: root
EXEC /bin/sh -c 'echo ~root && sleep 0'
EXEC /bin/sh -c '( umask 77 && mkdir -p "`echo /root/.ansible/tmp`"&& mkdir /root/.ansible/tmp/ansible-tmp-1596551682.4189703-16288-269707015022603 && echo ansible-tmp-1596551682.4189703-16288-269707015022603="`echo /root/.ansible/tmp/ansible-tmp-1596551682.4189703-16288-269707015022603`" ) && sleep 0'
Using module file /usr/lib/python3.6/site-packages/ansible/modules/system/setup.py
PUT /root/.ansible/tmp/ansible-local-15515o7pn3a74/tmppvp3cca_ TO /root/.ansible/tmp/ansible-tmp-1596551682.4189703-16288-269707015022603/AnsiballZ_setup.py
EXEC /bin/sh -c 'chmod u+x /root/.ansible/tmp/ansible-tmp-1596551682.4189703-16288-269707015022603/ /root/.ansible/tmp/ansible-tmp-1596551682.4189703-16288-269707015022603/AnsiballZ_setup.py && sleep 0'
EXEC /bin/sh -c '/usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1596551682.4189703-16288-269707015022603/AnsiballZ_setup.py && sleep 0'
EXEC /bin/sh -c 'rm -f -r /root/.ansible/tmp/ansible-tmp-1596551682.4189703-16288-269707015022603/ > /dev/null 2>&1 && sleep 0'
ok: [localhost]

TASK [include_tasks] ************************************************************************************************************************************************************************************
task path: /root/test/storage/tests/tests_luks.yml:13
included: /root/test/storage/tests/get_unused_disk.yml for localhost

TASK [Find unused disks in the system] ******************************************************************************************************************************************************************
task path: /root/test/storage/tests/get_unused_disk.yml:2
ESTABLISH LOCAL CONNECTION FOR USER: root
EXEC /bin/sh -c 'echo ~root && sleep 0'
EXEC /bin/sh -c '( umask 77 && mkdir -p "`echo /root/.ansible/tmp`"&& mkdir /root/.ansible/tmp/ansible-tmp-1596551684.3667912-16397-128580638255759 && echo ansible-tmp-1596551684.3667912-16397-128580638255759="`echo /root/.ansible/tmp/ansible-tmp-1596551684.3667912-16397-128580638255759`" ) && sleep 0'
Using module file /root/test/storage/library/find_unused_disk.py
PUT /root/.ansible/tmp/ansible-local-15515o7pn3a74/tmpmqn72f82 TO /root/.ansible/tmp/ansible-tmp-1596551684.3667912-16397-128580638255759/AnsiballZ_find_unused_disk.py
EXEC /bin/sh -c 'chmod u+x /root/.ansible/tmp/ansible-tmp-1596551684.3667912-16397-128580638255759/ /root/.ansible/tmp/ansible-tmp-1596551684.3667912-16397-128580638255759/AnsiballZ_find_unused_disk.py && sleep 0'
EXEC /bin/sh -c '/usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1596551684.3667912-16397-128580638255759/AnsiballZ_find_unused_disk.py && sleep 0'
EXEC /bin/sh -c 'rm -f -r /root/.ansible/tmp/ansible-tmp-1596551684.3667912-16397-128580638255759/ > /dev/null 2>&1 && sleep 0'
ok: [localhost] => {
"changed": false,
"disks": [
"nvme0n1"
],
"invocation": {
"module_args": {
"max_return": 1,
"min_size": "5g"
}
}
}

TASK [set_fact] *****************************************************************************************************************************************************************************************
task path: /root/test/storage/tests/get_unused_disk.yml:8
ok: [localhost] => {
"ansible_facts": {
"unused_disks": [
"nvme0n1"
]
},
"changed": false
}

TASK [Exit playbook when there's not enough unused disks in the system] *********************************************************************************************************************************
task path: /root/test/storage/tests/get_unused_disk.yml:12
skipping: [localhost] => {
"changed": false,
"skip_reason": "Conditional result was False"
}

TASK [Print unused disks] *******************************************************************************************************************************************************************************
task path: /root/test/storage/tests/get_unused_disk.yml:17
ok: [localhost] => {
"unused_disks": [
"nvme0n1"
]
}

TASK [Create an encrypted partition volume w/ default fs] ***********************************************************************************************************************************************
task path: /root/test/storage/tests/tests_luks.yml:22

TASK [storage : Set version specific variables] *********************************************************************************************************************************************************
task path: /root/test/storage/tasks/main.yml:2
ok: [localhost] => {
"ansible_facts": {
"blivet_package_list": [
"python3-blivet",
"libblockdev-crypto",
"libblockdev-dm",
"libblockdev-lvm",
"libblockdev-mdraid",
"libblockdev-swap"
]
},
"ansible_included_var_files": [
"/root/test/storage/vars/RedHat_8.yml"
],
"changed": false
}

TASK [storage : define an empty list of pools to be used in testing] ************************************************************************************************************************************
task path: /root/test/storage/tasks/main.yml:14
ok: [localhost] => {
"ansible_facts": {
"_storage_pools_list": []
},
"changed": false
}

TASK [storage : define an empty list of volumes to be used in testing] **********************************************************************************************************************************
task path: /root/test/storage/tasks/main.yml:18
ok: [localhost] => {
"ansible_facts": {
"_storage_volumes_list": []
},
"changed": false
}

TASK [storage : include the appropriate provider tasks] *************************************************************************************************************************************************
task path: /root/test/storage/tasks/main.yml:22
included: /root/test/storage/tasks/main-blivet.yml for localhost

TASK [storage : get a list of rpm packages installed on host machine] ***********************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:2
skipping: [localhost] => {
"changed": false,
"skip_reason": "Conditional result was False"
}

TASK [storage : make sure blivet is available] **********************************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:7
ESTABLISH LOCAL CONNECTION FOR USER: root
EXEC /bin/sh -c 'echo ~root && sleep 0'
EXEC /bin/sh -c '( umask 77 && mkdir -p "`echo /root/.ansible/tmp`"&& mkdir /root/.ansible/tmp/ansible-tmp-1596551686.3083057-16431-81065081504968 && echo ansible-tmp-1596551686.3083057-16431-81065081504968="`echo /root/.ansible/tmp/ansible-tmp-1596551686.3083057-16431-81065081504968`" ) && sleep 0'
Using module file /usr/lib/python3.6/site-packages/ansible/modules/packaging/os/dnf.py
PUT /root/.ansible/tmp/ansible-local-15515o7pn3a74/tmpcs3kyb78 TO /root/.ansible/tmp/ansible-tmp-1596551686.3083057-16431-81065081504968/AnsiballZ_dnf.py
EXEC /bin/sh -c 'chmod u+x /root/.ansible/tmp/ansible-tmp-1596551686.3083057-16431-81065081504968/ /root/.ansible/tmp/ansible-tmp-1596551686.3083057-16431-81065081504968/AnsiballZ_dnf.py && sleep 0'
EXEC /bin/sh -c '/usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1596551686.3083057-16431-81065081504968/AnsiballZ_dnf.py && sleep 0'
EXEC /bin/sh -c 'rm -f -r /root/.ansible/tmp/ansible-tmp-1596551686.3083057-16431-81065081504968/ > /dev/null 2>&1 && sleep 0'
ok: [localhost] => {
"changed": false,
"invocation": {
"module_args": {
"allow_downgrade": false,
"autoremove": false,
"bugfix": false,
"conf_file": null,
"disable_excludes": null,
"disable_gpg_check": false,
"disable_plugin": [],
"disablerepo": [],
"download_dir": null,
"download_only": false,
"enable_plugin": [],
"enablerepo": [],
"exclude": [],
"install_repoquery": true,
"install_weak_deps": true,
"installroot": "/",
"list": null,
"lock_timeout": 30,
"name": [
"python3-blivet",
"libblockdev-crypto",
"libblockdev-dm",
"libblockdev-lvm",
"libblockdev-mdraid",
"libblockdev-swap"
],
"releasever": null,
"security": false,
"skip_broken": false,
"state": "present",
"update_cache": false,
"update_only": false,
"validate_certs": true
}
},
"msg": "Nothing to do",
"rc": 0,
"results": []
}

TASK [storage : initialize internal facts] **************************************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:18
ok: [localhost] => {
"ansible_facts": {
"_storage_pools": [],
"_storage_vol_defaults": [],
"_storage_vol_pools": [],
"_storage_vols_no_defaults": [],
"_storage_vols_no_defaults_by_pool": {},
"_storage_vols_w_defaults": [],
"_storage_volumes": []
},
"changed": false
}

TASK [storage : Apply defaults to pools and volumes [1/6]] **********************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:28
ok: [localhost] => (item={'name': 'foo', 'type': 'partition', 'disks': ['nvme0n1'], 'volumes': [{'name': 'test1', 'type': 'partition', 'mount_point': '/opt/test1', 'encryption': True, 'encryption_passphrase': 'yabbadabbadoo'}]}) => {
"ansible_facts": {
"_storage_pools": [
{
"disks": [
"nvme0n1"
],
"encryption": false,
"encryption_cipher": null,
"encryption_key_file": null,
"encryption_key_size": null,
"encryption_luks_version": null,
"encryption_passphrase": null,
"name": "foo",
"raid_chunk_size": null,
"raid_device_count": null,
"raid_level": null,
"raid_metadata_version": null,
"raid_spare_count": null,
"state": "present",
"type": "partition",
"volumes": [
{
"encryption": true,
"encryption_passphrase": "yabbadabbadoo",
"mount_point": "/opt/test1",
"name": "test1",
"type": "partition"
}
]
}
]
},
"ansible_loop_var": "pool",
"changed": false,
"pool": {
"disks": [
"nvme0n1"
],
"name": "foo",
"type": "partition",
"volumes": [
{
"encryption": true,
"encryption_passphrase": "yabbadabbadoo",
"mount_point": "/opt/test1",
"name": "test1",
"type": "partition"
}
]
}
}

TASK [storage : Apply defaults to pools and volumes [2/6]] **********************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:36
ok: [localhost] => (item=[{'state': 'present', 'type': 'partition', 'encryption': False, 'encryption_passphrase': None, 'encryption_key_file': None, 'encryption_cipher': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, 'name': 'foo', 'disks': ['nvme0n1'], 'volumes': [{'name': 'test1', 'type': 'partition', 'mount_point': '/opt/test1', 'encryption': True, 'encryption_passphrase': 'yabbadabbadoo'}]}, {'name': 'test1', 'type': 'partition', 'mount_point': '/opt/test1', 'encryption': True, 'encryption_passphrase': 'yabbadabbadoo'}]) => {
"ansible_facts": {
"_storage_vol_defaults": [
{
"encryption": false,
"encryption_cipher": null,
"encryption_key_file": null,
"encryption_key_size": null,
"encryption_luks_version": null,
"encryption_passphrase": null,
"fs_create_options": "",
"fs_label": "",
"fs_overwrite_existing": true,
"fs_type": "xfs",
"mount_check": 0,
"mount_device_identifier": "uuid",
"mount_options": "defaults",
"mount_passno": 0,
"mount_point": "",
"raid_chunk_size": null,
"raid_device_count": null,
"raid_level": null,
"raid_metadata_version": null,
"raid_spare_count": null,
"size": 0,
"state": "present",
"type": "lvm"
}
],
"_storage_vol_pools": [
"foo"
],
"_storage_vols_no_defaults": [
{
"encryption": true,
"encryption_passphrase": "yabbadabbadoo",
"mount_point": "/opt/test1",
"name": "test1",
"type": "partition"
}
]
},
"ansible_loop_var": "item",
"changed": false,
"item": [
{
"disks": [
"nvme0n1"
],
"encryption": false,
"encryption_cipher": null,
"encryption_key_file": null,
"encryption_key_size": null,
"encryption_luks_version": null,
"encryption_passphrase": null,
"name": "foo",
"raid_chunk_size": null,
"raid_device_count": null,
"raid_level": null,
"raid_metadata_version": null,
"raid_spare_count": null,
"state": "present",
"type": "partition",
"volumes": [
{
"encryption": true,
"encryption_passphrase": "yabbadabbadoo",
"mount_point": "/opt/test1",
"name": "test1",
"type": "partition"
}
]
},
{
"encryption": true,
"encryption_passphrase": "yabbadabbadoo",
"mount_point": "/opt/test1",
"name": "test1",
"type": "partition"
}
]
}

TASK [storage : Apply defaults to pools and volumes [3/6]] **********************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:44
ok: [localhost] => (item=[{'name': 'test1', 'type': 'partition', 'mount_point': '/opt/test1', 'encryption': True, 'encryption_passphrase': 'yabbadabbadoo'}, {'state': 'present', 'type': 'lvm', 'size': 0, 'fs_type': 'xfs', 'fs_label': '', 'fs_create_options': '', 'fs_overwrite_existing': True, 'mount_point': '', 'mount_options': 'defaults', 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, 'encryption': False, 'encryption_passphrase': None, 'encryption_key_file': None, 'encryption_cipher': None, 'encryption_key_size': None, 'encryption_luks_version': None}]) => {
"ansible_facts": {
"_storage_vols_w_defaults": [
{
"encryption": true,
"encryption_cipher": null,
"encryption_key_file": null,
"encryption_key_size": null,
"encryption_luks_version": null,
"encryption_passphrase": "yabbadabbadoo",
"fs_create_options": "",
"fs_label": "",
"fs_overwrite_existing": true,
"fs_type": "xfs",
"mount_check": 0,
"mount_device_identifier": "uuid",
"mount_options": "defaults",
"mount_passno": 0,
"mount_point": "/opt/test1",
"name": "test1",
"pool": "foo",
"raid_chunk_size": null,
"raid_device_count": null,
"raid_level": null,
"raid_metadata_version": null,
"raid_spare_count": null,
"size": 0,
"state": "present",
"type": "partition"
}
]
},
"ansible_index_var": "idx",
"ansible_loop_var": "item",
"changed": false,
"idx": 0,
"item": [
{
"encryption": true,
"encryption_passphrase": "yabbadabbadoo",
"mount_point": "/opt/test1",
"name": "test1",
"type": "partition"
},
{
"encryption": false,
"encryption_cipher": null,
"encryption_key_file": null,
"encryption_key_size": null,
"encryption_luks_version": null,
"encryption_passphrase": null,
"fs_create_options": "",
"fs_label": "",
"fs_overwrite_existing": true,
"fs_type": "xfs",
"mount_check": 0,
"mount_device_identifier": "uuid",
"mount_options": "defaults",
"mount_passno": 0,
"mount_point": "",
"raid_chunk_size": null,
"raid_device_count": null,
"raid_level": null,
"raid_metadata_version": null,
"raid_spare_count": null,
"size": 0,
"state": "present",
"type": "lvm"
}
]
}

TASK [storage : Apply defaults to pools and volumes [4/6]] **********************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:52
ok: [localhost] => (item={'state': 'present', 'type': 'partition', 'encryption': False, 'encryption_passphrase': None, 'encryption_key_file': None, 'encryption_cipher': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, 'name': 'foo', 'disks': ['nvme0n1'], 'volumes': [{'name': 'test1', 'type': 'partition', 'mount_point': '/opt/test1', 'encryption': True, 'encryption_passphrase': 'yabbadabbadoo'}]}) => {
"ansible_facts": {
"_storage_vols_no_defaults_by_pool": {
"foo": [
{
"encryption": true,
"encryption_cipher": null,
"encryption_key_file": null,
"encryption_key_size": null,
"encryption_luks_version": null,
"encryption_passphrase": "yabbadabbadoo",
"fs_create_options": "",
"fs_label": "",
"fs_overwrite_existing": true,
"fs_type": "xfs",
"mount_check": 0,
"mount_device_identifier": "uuid",
"mount_options": "defaults",
"mount_passno": 0,
"mount_point": "/opt/test1",
"name": "test1",
"pool": "foo",
"raid_chunk_size": null,
"raid_device_count": null,
"raid_level": null,
"raid_metadata_version": null,
"raid_spare_count": null,
"size": 0,
"state": "present",
"type": "partition"
}
]
}
},
"ansible_loop_var": "item",
"changed": false,
"item": {
"disks": [
"nvme0n1"
],
"encryption": false,
"encryption_cipher": null,
"encryption_key_file": null,
"encryption_key_size": null,
"encryption_luks_version": null,
"encryption_passphrase": null,
"name": "foo",
"raid_chunk_size": null,
"raid_device_count": null,
"raid_level": null,
"raid_metadata_version": null,
"raid_spare_count": null,
"state": "present",
"type": "partition",
"volumes": [
{
"encryption": true,
"encryption_passphrase": "yabbadabbadoo",
"mount_point": "/opt/test1",
"name": "test1",
"type": "partition"
}
]
}
}

TASK [storage : Apply defaults to pools and volumes [5/6]] **********************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:61
ok: [localhost] => (item={'state': 'present', 'type': 'partition', 'encryption': False, 'encryption_passphrase': None, 'encryption_key_file': None, 'encryption_cipher': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, 'name': 'foo', 'disks': ['nvme0n1'], 'volumes': [{'name': 'test1', 'type': 'partition', 'mount_point': '/opt/test1', 'encryption': True, 'encryption_passphrase': 'yabbadabbadoo'}]}) => {
"ansible_facts": {
"_storage_pools": [
{
"disks": [
"nvme0n1"
],
"encryption": false,
"encryption_cipher": null,
"encryption_key_file": null,
"encryption_key_size": null,
"encryption_luks_version": null,
"encryption_passphrase": null,
"name": "foo",
"raid_chunk_size": null,
"raid_device_count": null,
"raid_level": null,
"raid_metadata_version": null,
"raid_spare_count": null,
"state": "present",
"type": "partition",
"volumes": [
{
"encryption": true,
"encryption_cipher": null,
"encryption_key_file": null,
"encryption_key_size": null,
"encryption_luks_version": null,
"encryption_passphrase": "yabbadabbadoo",
"fs_create_options": "",
"fs_label": "",
"fs_overwrite_existing": true,
"fs_type": "xfs",
"mount_check": 0,
"mount_device_identifier": "uuid",
"mount_options": "defaults",
"mount_passno": 0,
"mount_point": "/opt/test1",
"name": "test1",
"pool": "foo",
"raid_chunk_size": null,
"raid_device_count": null,
"raid_level": null,
"raid_metadata_version": null,
"raid_spare_count": null,
"size": 0,
"state": "present",
"type": "partition"
}
]
}
]
},
"ansible_index_var": "idx",
"ansible_loop_var": "pool",
"changed": false,
"idx": 0,
"pool": {
"disks": [
"nvme0n1"
],
"encryption": false,
"encryption_cipher": null,
"encryption_key_file": null,
"encryption_key_size": null,
"encryption_luks_version": null,
"encryption_passphrase": null,
"name": "foo",
"raid_chunk_size": null,
"raid_device_count": null,
"raid_level": null,
"raid_metadata_version": null,
"raid_spare_count": null,
"state": "present",
"type": "partition",
"volumes": [
{
"encryption": true,
"encryption_passphrase": "yabbadabbadoo",
"mount_point": "/opt/test1",
"name": "test1",
"type": "partition"
}
]
}
}

TASK [storage : Apply defaults to pools and volumes [6/6]] **********************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:72

TASK [storage : debug] **********************************************************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:84
ok: [localhost] => {
"_storage_pools": [
{
"disks": [
"nvme0n1"
],
"encryption": false,
"encryption_cipher": null,
"encryption_key_file": null,
"encryption_key_size": null,
"encryption_luks_version": null,
"encryption_passphrase": null,
"name": "foo",
"raid_chunk_size": null,
"raid_device_count": null,
"raid_level": null,
"raid_metadata_version": null,
"raid_spare_count": null,
"state": "present",
"type": "partition",
"volumes": [
{
"encryption": true,
"encryption_cipher": null,
"encryption_key_file": null,
"encryption_key_size": null,
"encryption_luks_version": null,
"encryption_passphrase": "yabbadabbadoo",
"fs_create_options": "",
"fs_label": "",
"fs_overwrite_existing": true,
"fs_type": "xfs",
"mount_check": 0,
"mount_device_identifier": "uuid",
"mount_options": "defaults",
"mount_passno": 0,
"mount_point": "/opt/test1",
"name": "test1",
"pool": "foo",
"raid_chunk_size": null,
"raid_device_count": null,
"raid_level": null,
"raid_metadata_version": null,
"raid_spare_count": null,
"size": 0,
"state": "present",
"type": "partition"
}
]
}
]
}

TASK [storage : debug] **********************************************************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:87
ok: [localhost] => {
"_storage_volumes": []
}

TASK [storage : get required packages] ******************************************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:90
ESTABLISH LOCAL CONNECTION FOR USER: root
EXEC /bin/sh -c 'echo ~root && sleep 0'
EXEC /bin/sh -c '( umask 77 && mkdir -p "echo /root/.ansible/tmp"&& mkdir /root/.ansible/tmp/ansible-tmp-1596551691.1982038-16465-51285333209511 && echo ansible-tmp-1596551691.1982038-16465-51285333209511="echo /root/.ansible/tmp/ansible-tmp-1596551691.1982038-16465-51285333209511" ) && sleep 0'
Using module file /root/test/storage/library/blivet.py
PUT /root/.ansible/tmp/ansible-local-15515o7pn3a74/tmps14vqdi_ TO /root/.ansible/tmp/ansible-tmp-1596551691.1982038-16465-51285333209511/AnsiballZ_blivet.py
EXEC /bin/sh -c 'chmod u+x /root/.ansible/tmp/ansible-tmp-1596551691.1982038-16465-51285333209511/ /root/.ansible/tmp/ansible-tmp-1596551691.1982038-16465-51285333209511/AnsiballZ_blivet.py && sleep 0'
EXEC /bin/sh -c '/usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1596551691.1982038-16465-51285333209511/AnsiballZ_blivet.py && sleep 0'
EXEC /bin/sh -c 'rm -f -r /root/.ansible/tmp/ansible-tmp-1596551691.1982038-16465-51285333209511/ > /dev/null 2>&1 && sleep 0'
ok: [localhost] => {
"actions": [],
"changed": false,
"crypts": [],
"invocation": {
"module_args": {
"disklabel_type": null,
"packages_only": true,
"pools": [
{
"disks": [
"nvme0n1"
],
"encryption": false,
"encryption_cipher": null,
"encryption_key_file": null,
"encryption_key_size": null,
"encryption_luks_version": null,
"encryption_passphrase": null,
"name": "foo",
"raid_chunk_size": null,
"raid_device_count": null,
"raid_level": null,
"raid_metadata_version": null,
"raid_spare_count": null,
"state": "present",
"type": "partition",
"volumes": [
{
"encryption": true,
"encryption_cipher": null,
"encryption_key_file": null,
"encryption_key_size": null,
"encryption_luks_version": null,
"encryption_passphrase": "yabbadabbadoo",
"fs_create_options": "",
"fs_label": "",
"fs_overwrite_existing": true,
"fs_type": "xfs",
"mount_check": 0,
"mount_device_identifier": "uuid",
"mount_options": "defaults",
"mount_passno": 0,
"mount_point": "/opt/test1",
"name": "test1",
"pool": "foo",
"raid_chunk_size": null,
"raid_device_count": null,
"raid_level": null,
"raid_metadata_version": null,
"raid_spare_count": null,
"size": 0,
"state": "present",
"type": "partition"
}
]
}
],
"safe_mode": true,
"use_partitions": null,
"volumes": []
}
},
"leaves": [],
"mounts": [],
"packages": [
"cryptsetup",
"xfsprogs"
],
"pools": [],
"volumes": []
}

TASK [storage : make sure required packages are installed] **********************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:99
ESTABLISH LOCAL CONNECTION FOR USER: root
EXEC /bin/sh -c 'echo ~root && sleep 0'
EXEC /bin/sh -c '( umask 77 && mkdir -p "echo /root/.ansible/tmp"&& mkdir /root/.ansible/tmp/ansible-tmp-1596551694.9371064-16528-8555688808085 && echo ansible-tmp-1596551694.9371064-16528-8555688808085="echo /root/.ansible/tmp/ansible-tmp-1596551694.9371064-16528-8555688808085" ) && sleep 0'
Using module file /usr/lib/python3.6/site-packages/ansible/modules/packaging/os/dnf.py
PUT /root/.ansible/tmp/ansible-local-15515o7pn3a74/tmpajtghxml TO /root/.ansible/tmp/ansible-tmp-1596551694.9371064-16528-8555688808085/AnsiballZ_dnf.py
EXEC /bin/sh -c 'chmod u+x /root/.ansible/tmp/ansible-tmp-1596551694.9371064-16528-8555688808085/ /root/.ansible/tmp/ansible-tmp-1596551694.9371064-16528-8555688808085/AnsiballZ_dnf.py && sleep 0'
EXEC /bin/sh -c '/usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1596551694.9371064-16528-8555688808085/AnsiballZ_dnf.py && sleep 0'
EXEC /bin/sh -c 'rm -f -r /root/.ansible/tmp/ansible-tmp-1596551694.9371064-16528-8555688808085/ > /dev/null 2>&1 && sleep 0'
changed: [localhost] => {
"changed": true,
"invocation": {
"module_args": {
"allow_downgrade": false,
"autoremove": false,
"bugfix": false,
"conf_file": null,
"disable_excludes": null,
"disable_gpg_check": false,
"disable_plugin": [],
"disablerepo": [],
"download_dir": null,
"download_only": false,
"enable_plugin": [],
"enablerepo": [],
"exclude": [],
"install_repoquery": true,
"install_weak_deps": true,
"installroot": "/",
"list": null,
"lock_timeout": 30,
"name": [
"cryptsetup",
"xfsprogs"
],
"releasever": null,
"security": false,
"skip_broken": false,
"state": "present",
"update_cache": false,
"update_only": false,
"validate_certs": true
}
},
"msg": "",
"rc": 0,
"results": [
"Installed: cryptsetup-2.3.3-1.el8.x86_64"
]
}

TASK [storage : manage the pools and volumes to match the specified state] ******************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:104
ESTABLISH LOCAL CONNECTION FOR USER: root
EXEC /bin/sh -c 'echo ~root && sleep 0'
EXEC /bin/sh -c '( umask 77 && mkdir -p "echo /root/.ansible/tmp"&& mkdir /root/.ansible/tmp/ansible-tmp-1596551700.3955412-16581-219911815072388 && echo ansible-tmp-1596551700.3955412-16581-219911815072388="echo /root/.ansible/tmp/ansible-tmp-1596551700.3955412-16581-219911815072388" ) && sleep 0'
Using module file /root/test/storage/library/blivet.py
PUT /root/.ansible/tmp/ansible-local-15515o7pn3a74/tmpoq5ibqe2 TO /root/.ansible/tmp/ansible-tmp-1596551700.3955412-16581-219911815072388/AnsiballZ_blivet.py
EXEC /bin/sh -c 'chmod u+x /root/.ansible/tmp/ansible-tmp-1596551700.3955412-16581-219911815072388/ /root/.ansible/tmp/ansible-tmp-1596551700.3955412-16581-219911815072388/AnsiballZ_blivet.py && sleep 0'
EXEC /bin/sh -c '/usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1596551700.3955412-16581-219911815072388/AnsiballZ_blivet.py && sleep 0'
EXEC /bin/sh -c 'rm -f -r /root/.ansible/tmp/ansible-tmp-1596551700.3955412-16581-219911815072388/ > /dev/null 2>&1 && sleep 0'
changed: [localhost] => {
"actions": [
{
"action": "create format",
"device": "/dev/nvme0n1",
"fs_type": "disklabel"
},
{
"action": "create device",
"device": "/dev/nvme0n1p1",
"fs_type": null
},
{
"action": "create format",
"device": "/dev/nvme0n1p1",
"fs_type": "luks"
},
{
"action": "create device",
"device": "/dev/mapper/luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5",
"fs_type": null
},
{
"action": "create format",
"device": "/dev/mapper/luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5",
"fs_type": "xfs"
}
],
"changed": true,
"crypts": [
{
"backing_device": "/dev/nvme0n1p1",
"name": "luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5",
"password": "-",
"state": "present"
}
],
"invocation": {
"module_args": {
"disklabel_type": null,
"packages_only": false,
"pools": [
{
"disks": [
"nvme0n1"
],
"encryption": false,
"encryption_cipher": null,
"encryption_key_file": null,
"encryption_key_size": null,
"encryption_luks_version": null,
"encryption_passphrase": null,
"name": "foo",
"raid_chunk_size": null,
"raid_device_count": null,
"raid_level": null,
"raid_metadata_version": null,
"raid_spare_count": null,
"state": "present",
"type": "partition",
"volumes": [
{
"_device": "/dev/mapper/luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5",
"_kernel_device": "/dev/dm-3",
"_mount_id": "/dev/mapper/luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5",
"_raw_device": "/dev/nvme0n1p1",
"_raw_kernel_device": "/dev/nvme0n1p1",
"encryption": true,
"encryption_cipher": null,
"encryption_key_file": null,
"encryption_key_size": null,
"encryption_luks_version": null,
"encryption_passphrase": "yabbadabbadoo",
"fs_create_options": "",
"fs_label": "",
"fs_overwrite_existing": true,
"fs_type": "xfs",
"mount_check": 0,
"mount_device_identifier": "uuid",
"mount_options": "defaults",
"mount_passno": 0,
"mount_point": "/opt/test1",
"name": "test1",
"pool": "foo",
"raid_chunk_size": null,
"raid_device_count": null,
"raid_level": null,
"raid_metadata_version": null,
"raid_spare_count": null,
"size": 0,
"state": "present",
"type": "partition"
}
]
}
],
"safe_mode": false,
"use_partitions": null,
"volumes": []
}
},
"leaves": [
"/dev/sda1",
"/dev/sda2",
"/dev/mapper/rhel_storageqe--62-home",
"/dev/mapper/rhel_storageqe--62-root",
"/dev/mapper/rhel_storageqe--62-swap",
"/dev/sdb",
"/dev/sdh",
"/dev/sdi",
"/dev/sdj",
"/dev/sdc",
"/dev/sdk",
"/dev/sdl1",
"/dev/sdd",
"/dev/sde",
"/dev/sdf",
"/dev/sdg",
"/dev/mapper/luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5"
],
"mounts": [
{
"dump": 0,
"fstype": "xfs",
"opts": "defaults",
"passno": 0,
"path": "/opt/test1",
"src": "/dev/mapper/luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5",
"state": "mounted"
}
],
"packages": [
"e2fsprogs",
"lvm2",
"xfsprogs",
"cryptsetup",
"dosfstools"
],
"pools": [
{
"disks": [
"nvme0n1"
],
"encryption": false,
"encryption_cipher": null,
"encryption_key_file": null,
"encryption_key_size": null,
"encryption_luks_version": null,
"encryption_passphrase": null,
"name": "foo",
"raid_chunk_size": null,
"raid_device_count": null,
"raid_level": null,
"raid_metadata_version": null,
"raid_spare_count": null,
"state": "present",
"type": "partition",
"volumes": [
{
"_device": "/dev/mapper/luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5",
"_kernel_device": "/dev/dm-3",
"_mount_id": "/dev/mapper/luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5",
"_raw_device": "/dev/nvme0n1p1",
"_raw_kernel_device": "/dev/nvme0n1p1",
"encryption": true,
"encryption_cipher": null,
"encryption_key_file": null,
"encryption_key_size": null,
"encryption_luks_version": null,
"encryption_passphrase": "yabbadabbadoo",
"fs_create_options": "",
"fs_label": "",
"fs_overwrite_existing": true,
"fs_type": "xfs",
"mount_check": 0,
"mount_device_identifier": "uuid",
"mount_options": "defaults",
"mount_passno": 0,
"mount_point": "/opt/test1",
"name": "test1",
"pool": "foo",
"raid_chunk_size": null,
"raid_device_count": null,
"raid_level": null,
"raid_metadata_version": null,
"raid_spare_count": null,
"size": 0,
"state": "present",
"type": "partition"
}
]
}
],
"volumes": []
}

TASK [storage : debug] **********************************************************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:113
ok: [localhost] => {
"blivet_output": {
"actions": [
{
"action": "create format",
"device": "/dev/nvme0n1",
"fs_type": "disklabel"
},
{
"action": "create device",
"device": "/dev/nvme0n1p1",
"fs_type": null
},
{
"action": "create format",
"device": "/dev/nvme0n1p1",
"fs_type": "luks"
},
{
"action": "create device",
"device": "/dev/mapper/luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5",
"fs_type": null
},
{
"action": "create format",
"device": "/dev/mapper/luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5",
"fs_type": "xfs"
}
],
"changed": true,
"crypts": [
{
"backing_device": "/dev/nvme0n1p1",
"name": "luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5",
"password": "-",
"state": "present"
}
],
"failed": false,
"leaves": [
"/dev/sda1",
"/dev/sda2",
"/dev/mapper/rhel_storageqe--62-home",
"/dev/mapper/rhel_storageqe--62-root",
"/dev/mapper/rhel_storageqe--62-swap",
"/dev/sdb",
"/dev/sdh",
"/dev/sdi",
"/dev/sdj",
"/dev/sdc",
"/dev/sdk",
"/dev/sdl1",
"/dev/sdd",
"/dev/sde",
"/dev/sdf",
"/dev/sdg",
"/dev/mapper/luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5"
],
"mounts": [
{
"dump": 0,
"fstype": "xfs",
"opts": "defaults",
"passno": 0,
"path": "/opt/test1",
"src": "/dev/mapper/luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5",
"state": "mounted"
}
],
"packages": [
"e2fsprogs",
"lvm2",
"xfsprogs",
"cryptsetup",
"dosfstools"
],
"pools": [
{
"disks": [
"nvme0n1"
],
"encryption": false,
"encryption_cipher": null,
"encryption_key_file": null,
"encryption_key_size": null,
"encryption_luks_version": null,
"encryption_passphrase": null,
"name": "foo",
"raid_chunk_size": null,
"raid_device_count": null,
"raid_level": null,
"raid_metadata_version": null,
"raid_spare_count": null,
"state": "present",
"type": "partition",
"volumes": [
{
"_device": "/dev/mapper/luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5",
"_kernel_device": "/dev/dm-3",
"_mount_id": "/dev/mapper/luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5",
"_raw_device": "/dev/nvme0n1p1",
"_raw_kernel_device": "/dev/nvme0n1p1",
"encryption": true,
"encryption_cipher": null,
"encryption_key_file": null,
"encryption_key_size": null,
"encryption_luks_version": null,
"encryption_passphrase": "yabbadabbadoo",
"fs_create_options": "",
"fs_label": "",
"fs_overwrite_existing": true,
"fs_type": "xfs",
"mount_check": 0,
"mount_device_identifier": "uuid",
"mount_options": "defaults",
"mount_passno": 0,
"mount_point": "/opt/test1",
"name": "test1",
"pool": "foo",
"raid_chunk_size": null,
"raid_device_count": null,
"raid_level": null,
"raid_metadata_version": null,
"raid_spare_count": null,
"size": 0,
"state": "present",
"type": "partition"
}
]
}
],
"volumes": []
}
}

TASK [storage : set the list of pools for test verification] ********************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:116
ok: [localhost] => {
"ansible_facts": {
"_storage_pools_list": [
{
"disks": [
"nvme0n1"
],
"encryption": false,
"encryption_cipher": null,
"encryption_key_file": null,
"encryption_key_size": null,
"encryption_luks_version": null,
"encryption_passphrase": null,
"name": "foo",
"raid_chunk_size": null,
"raid_device_count": null,
"raid_level": null,
"raid_metadata_version": null,
"raid_spare_count": null,
"state": "present",
"type": "partition",
"volumes": [
{
"_device": "/dev/mapper/luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5",
"_kernel_device": "/dev/dm-3",
"_mount_id": "/dev/mapper/luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5",
"_raw_device": "/dev/nvme0n1p1",
"_raw_kernel_device": "/dev/nvme0n1p1",
"encryption": true,
"encryption_cipher": null,
"encryption_key_file": null,
"encryption_key_size": null,
"encryption_luks_version": null,
"encryption_passphrase": "yabbadabbadoo",
"fs_create_options": "",
"fs_label": "",
"fs_overwrite_existing": true,
"fs_type": "xfs",
"mount_check": 0,
"mount_device_identifier": "uuid",
"mount_options": "defaults",
"mount_passno": 0,
"mount_point": "/opt/test1",
"name": "test1",
"pool": "foo",
"raid_chunk_size": null,
"raid_device_count": null,
"raid_level": null,
"raid_metadata_version": null,
"raid_spare_count": null,
"size": 0,
"state": "present",
"type": "partition"
}
]
}
]
},
"changed": false
}

TASK [storage : set the list of volumes for test verification] ******************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:120
ok: [localhost] => {
"ansible_facts": {
"_storage_volumes_list": []
},
"changed": false
}

TASK [storage : remove obsolete mounts] *****************************************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:136

TASK [storage : tell systemd to refresh its view of /etc/fstab] *****************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:147
ESTABLISH LOCAL CONNECTION FOR USER: root
EXEC /bin/sh -c 'echo ~root && sleep 0'
EXEC /bin/sh -c '( umask 77 && mkdir -p "echo /root/.ansible/tmp"&& mkdir /root/.ansible/tmp/ansible-tmp-1596551718.4167783-17619-4844520969938 && echo ansible-tmp-1596551718.4167783-17619-4844520969938="echo /root/.ansible/tmp/ansible-tmp-1596551718.4167783-17619-4844520969938" ) && sleep 0'
Using module file /usr/lib/python3.6/site-packages/ansible/modules/system/systemd.py
PUT /root/.ansible/tmp/ansible-local-15515o7pn3a74/tmpcbripnpt TO /root/.ansible/tmp/ansible-tmp-1596551718.4167783-17619-4844520969938/AnsiballZ_systemd.py
EXEC /bin/sh -c 'chmod u+x /root/.ansible/tmp/ansible-tmp-1596551718.4167783-17619-4844520969938/ /root/.ansible/tmp/ansible-tmp-1596551718.4167783-17619-4844520969938/AnsiballZ_systemd.py && sleep 0'
EXEC /bin/sh -c '/usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1596551718.4167783-17619-4844520969938/AnsiballZ_systemd.py && sleep 0'
EXEC /bin/sh -c 'rm -f -r /root/.ansible/tmp/ansible-tmp-1596551718.4167783-17619-4844520969938/ > /dev/null 2>&1 && sleep 0'
ok: [localhost] => {
"changed": false,
"invocation": {
"module_args": {
"daemon_reexec": false,
"daemon_reload": true,
"enabled": null,
"force": null,
"masked": null,
"name": null,
"no_block": false,
"scope": null,
"state": null,
"user": null
}
},
"name": null,
"status": {}
}

TASK [storage : set up new/current mounts] **************************************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:152
ESTABLISH LOCAL CONNECTION FOR USER: root
EXEC /bin/sh -c 'echo ~root && sleep 0'
EXEC /bin/sh -c '( umask 77 && mkdir -p "echo /root/.ansible/tmp"&& mkdir /root/.ansible/tmp/ansible-tmp-1596551720.0600996-17655-275030844737805 && echo ansible-tmp-1596551720.0600996-17655-275030844737805="echo /root/.ansible/tmp/ansible-tmp-1596551720.0600996-17655-275030844737805" ) && sleep 0'
Using module file /usr/lib/python3.6/site-packages/ansible/modules/system/mount.py
PUT /root/.ansible/tmp/ansible-local-15515o7pn3a74/tmpc2j9_s_a TO /root/.ansible/tmp/ansible-tmp-1596551720.0600996-17655-275030844737805/AnsiballZ_mount.py
EXEC /bin/sh -c 'chmod u+x /root/.ansible/tmp/ansible-tmp-1596551720.0600996-17655-275030844737805/ /root/.ansible/tmp/ansible-tmp-1596551720.0600996-17655-275030844737805/AnsiballZ_mount.py && sleep 0'
EXEC /bin/sh -c '/usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1596551720.0600996-17655-275030844737805/AnsiballZ_mount.py && sleep 0'
EXEC /bin/sh -c 'rm -f -r /root/.ansible/tmp/ansible-tmp-1596551720.0600996-17655-275030844737805/ > /dev/null 2>&1 && sleep 0'
changed: [localhost] => (item={'src': '/dev/mapper/luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted'}) => {
"ansible_loop_var": "mount_info",
"changed": true,
"dump": "0",
"fstab": "/etc/fstab",
"fstype": "xfs",
"invocation": {
"module_args": {
"backup": false,
"boot": true,
"dump": null,
"fstab": null,
"fstype": "xfs",
"opts": "defaults",
"passno": null,
"path": "/opt/test1",
"src": "/dev/mapper/luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5",
"state": "mounted"
}
},
"mount_info": {
"dump": 0,
"fstype": "xfs",
"opts": "defaults",
"passno": 0,
"path": "/opt/test1",
"src": "/dev/mapper/luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5",
"state": "mounted"
},
"name": "/opt/test1",
"opts": "defaults",
"passno": "0",
"src": "/dev/mapper/luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5"
}

TASK [storage : tell systemd to refresh its view of /etc/fstab] *****************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:163
ESTABLISH LOCAL CONNECTION FOR USER: root
EXEC /bin/sh -c 'echo ~root && sleep 0'
EXEC /bin/sh -c '( umask 77 && mkdir -p "echo /root/.ansible/tmp"&& mkdir /root/.ansible/tmp/ansible-tmp-1596551720.95644-17680-240149803781325 && echo ansible-tmp-1596551720.95644-17680-240149803781325="echo /root/.ansible/tmp/ansible-tmp-1596551720.95644-17680-240149803781325" ) && sleep 0'
Using module file /usr/lib/python3.6/site-packages/ansible/modules/system/systemd.py
PUT /root/.ansible/tmp/ansible-local-15515o7pn3a74/tmp58e0xbbj TO /root/.ansible/tmp/ansible-tmp-1596551720.95644-17680-240149803781325/AnsiballZ_systemd.py
EXEC /bin/sh -c 'chmod u+x /root/.ansible/tmp/ansible-tmp-1596551720.95644-17680-240149803781325/ /root/.ansible/tmp/ansible-tmp-1596551720.95644-17680-240149803781325/AnsiballZ_systemd.py && sleep 0'
EXEC /bin/sh -c '/usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1596551720.95644-17680-240149803781325/AnsiballZ_systemd.py && sleep 0'
EXEC /bin/sh -c 'rm -f -r /root/.ansible/tmp/ansible-tmp-1596551720.95644-17680-240149803781325/ > /dev/null 2>&1 && sleep 0'
ok: [localhost] => {
"changed": false,
"invocation": {
"module_args": {
"daemon_reexec": false,
"daemon_reload": true,
"enabled": null,
"force": null,
"masked": null,
"name": null,
"no_block": false,
"scope": null,
"state": null,
"user": null
}
},
"name": null,
"status": {}
}

TASK [storage : Manage /etc/crypttab to account for changes we just made] *******************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:171
ESTABLISH LOCAL CONNECTION FOR USER: root
EXEC /bin/sh -c 'echo ~root && sleep 0'
EXEC /bin/sh -c '( umask 77 && mkdir -p "echo /root/.ansible/tmp"&& mkdir /root/.ansible/tmp/ansible-tmp-1596551721.9902716-17717-260049588039911 && echo ansible-tmp-1596551721.9902716-17717-260049588039911="echo /root/.ansible/tmp/ansible-tmp-1596551721.9902716-17717-260049588039911" ) && sleep 0'
Using module file /usr/lib/python3.6/site-packages/ansible/modules/system/crypttab.py
PUT /root/.ansible/tmp/ansible-local-15515o7pn3a74/tmph6ki_zxv TO /root/.ansible/tmp/ansible-tmp-1596551721.9902716-17717-260049588039911/AnsiballZ_crypttab.py
EXEC /bin/sh -c 'chmod u+x /root/.ansible/tmp/ansible-tmp-1596551721.9902716-17717-260049588039911/ /root/.ansible/tmp/ansible-tmp-1596551721.9902716-17717-260049588039911/AnsiballZ_crypttab.py && sleep 0'
EXEC /bin/sh -c '/usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1596551721.9902716-17717-260049588039911/AnsiballZ_crypttab.py && sleep 0'
EXEC /bin/sh -c 'rm -f -r /root/.ansible/tmp/ansible-tmp-1596551721.9902716-17717-260049588039911/ > /dev/null 2>&1 && sleep 0'
changed: [localhost] => (item={'backing_device': '/dev/nvme0n1p1', 'name': 'luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5', 'password': '-', 'state': 'present'}) => {
"ansible_loop_var": "entry",
"backing_device": "/dev/nvme0n1p1",
"changed": true,
"entry": {
"backing_device": "/dev/nvme0n1p1",
"name": "luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5",
"password": "-",
"state": "present"
},
"gid": 0,
"group": "root",
"invocation": {
"module_args": {
"backing_device": "/dev/nvme0n1p1",
"name": "luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5",
"opts": null,
"password": "-",
"path": "/etc/crypttab",
"state": "present"
}
},
"mode": "0600",
"msg": "added line",
"name": "luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5",
"opts": null,
"owner": "root",
"password": "-",
"path": "/etc/crypttab",
"secontext": "system_u:object_r:etc_t:s0",
"size": 59,
"state": "file",
"uid": 0,
"warnings": [
"Module did not set no_log for password"
]
}

TASK [storage : Update facts] ***************************************************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:186
ESTABLISH LOCAL CONNECTION FOR USER: root
EXEC /bin/sh -c 'echo ~root && sleep 0'
EXEC /bin/sh -c '( umask 77 && mkdir -p "echo /root/.ansible/tmp"&& mkdir /root/.ansible/tmp/ansible-tmp-1596551722.8393655-17732-211856504207708 && echo ansible-tmp-1596551722.8393655-17732-211856504207708="echo /root/.ansible/tmp/ansible-tmp-1596551722.8393655-17732-211856504207708" ) && sleep 0'
Using module file /usr/lib/python3.6/site-packages/ansible/modules/system/setup.py
PUT /root/.ansible/tmp/ansible-local-15515o7pn3a74/tmp2yhupv6h TO /root/.ansible/tmp/ansible-tmp-1596551722.8393655-17732-211856504207708/AnsiballZ_setup.py
EXEC /bin/sh -c 'chmod u+x /root/.ansible/tmp/ansible-tmp-1596551722.8393655-17732-211856504207708/ /root/.ansible/tmp/ansible-tmp-1596551722.8393655-17732-211856504207708/AnsiballZ_setup.py && sleep 0'
EXEC /bin/sh -c '/usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1596551722.8393655-17732-211856504207708/AnsiballZ_setup.py && sleep 0'
EXEC /bin/sh -c 'rm -f -r /root/.ansible/tmp/ansible-tmp-1596551722.8393655-17732-211856504207708/ > /dev/null 2>&1 && sleep 0'
ok: [localhost]

TASK [include_tasks] ************************************************************************************************************************************************************************************
task path: /root/test/storage/tests/tests_luks.yml:38
included: /root/test/storage/tests/verify-role-results.yml for localhost

TASK [Print out pool information] ***********************************************************************************************************************************************************************
task path: /root/test/storage/tests/verify-role-results.yml:1
ok: [localhost] => {
"_storage_pools_list": [
{
"disks": [
"nvme0n1"
],
"encryption": false,
"encryption_cipher": null,
"encryption_key_file": null,
"encryption_key_size": null,
"encryption_luks_version": null,
"encryption_passphrase": null,
"name": "foo",
"raid_chunk_size": null,
"raid_device_count": null,
"raid_level": null,
"raid_metadata_version": null,
"raid_spare_count": null,
"state": "present",
"type": "partition",
"volumes": [
{
"_device": "/dev/mapper/luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5",
"_kernel_device": "/dev/dm-3",
"_mount_id": "/dev/mapper/luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5",
"_raw_device": "/dev/nvme0n1p1",
"_raw_kernel_device": "/dev/nvme0n1p1",
"encryption": true,
"encryption_cipher": null,
"encryption_key_file": null,
"encryption_key_size": null,
"encryption_luks_version": null,
"encryption_passphrase": "yabbadabbadoo",
"fs_create_options": "",
"fs_label": "",
"fs_overwrite_existing": true,
"fs_type": "xfs",
"mount_check": 0,
"mount_device_identifier": "uuid",
"mount_options": "defaults",
"mount_passno": 0,
"mount_point": "/opt/test1",
"name": "test1",
"pool": "foo",
"raid_chunk_size": null,
"raid_device_count": null,
"raid_level": null,
"raid_metadata_version": null,
"raid_spare_count": null,
"size": 0,
"state": "present",
"type": "partition"
}
]
}
]
}

TASK [Print out volume information] *********************************************************************************************************************************************************************
task path: /root/test/storage/tests/verify-role-results.yml:6
skipping: [localhost] => {}

TASK [Collect info about the volumes.] ******************************************************************************************************************************************************************
task path: /root/test/storage/tests/verify-role-results.yml:14
ESTABLISH LOCAL CONNECTION FOR USER: root
EXEC /bin/sh -c 'echo ~root && sleep 0'
EXEC /bin/sh -c '( umask 77 && mkdir -p "echo /root/.ansible/tmp"&& mkdir /root/.ansible/tmp/ansible-tmp-1596551725.3757377-17847-222674636170443 && echo ansible-tmp-1596551725.3757377-17847-222674636170443="echo /root/.ansible/tmp/ansible-tmp-1596551725.3757377-17847-222674636170443" ) && sleep 0'
Using module file /root/test/storage/library/blockdev_info.py
PUT /root/.ansible/tmp/ansible-local-15515o7pn3a74/tmpyqchlbr_ TO /root/.ansible/tmp/ansible-tmp-1596551725.3757377-17847-222674636170443/AnsiballZ_blockdev_info.py
EXEC /bin/sh -c 'chmod u+x /root/.ansible/tmp/ansible-tmp-1596551725.3757377-17847-222674636170443/ /root/.ansible/tmp/ansible-tmp-1596551725.3757377-17847-222674636170443/AnsiballZ_blockdev_info.py && sleep 0'
EXEC /bin/sh -c '/usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1596551725.3757377-17847-222674636170443/AnsiballZ_blockdev_info.py && sleep 0'
EXEC /bin/sh -c 'rm -f -r /root/.ansible/tmp/ansible-tmp-1596551725.3757377-17847-222674636170443/ > /dev/null 2>&1 && sleep 0'
ok: [localhost] => {
"changed": false,
"info": {
"/dev/mapper/luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5": {
"fstype": "xfs",
"label": "",
"name": "/dev/mapper/luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5",
"size": "745.2G",
"type": "crypt",
"uuid": "f1698797-4201-47e1-ac23-985ab6927e03"
},
"/dev/mapper/rhel_storageqe--62-home": {
"fstype": "xfs",
"label": "",
"name": "/dev/mapper/rhel_storageqe--62-home",
"size": "200G",
"type": "lvm",
"uuid": "25082e54-84e7-4945-87a4-532894d69113"
},
"/dev/mapper/rhel_storageqe--62-root": {
"fstype": "xfs",
"label": "",
"name": "/dev/mapper/rhel_storageqe--62-root",
"size": "70G",
"type": "lvm",
"uuid": "2eeea9bb-806d-46d1-a309-9806a6d92074"
},
"/dev/mapper/rhel_storageqe--62-swap": {
"fstype": "swap",
"label": "",
"name": "/dev/mapper/rhel_storageqe--62-swap",
"size": "7.9G",
"type": "lvm",
"uuid": "aeaa2293-343b-4399-afa5-7d2ceafac06e"
},
"/dev/nvme0n1": {
"fstype": "",
"label": "",
"name": "/dev/nvme0n1",
"size": "745.2G",
"type": "disk",
"uuid": ""
},
"/dev/nvme0n1p1": {
"fstype": "crypto_LUKS",
"label": "",
"name": "/dev/nvme0n1p1",
"size": "745.2G",
"type": "partition",
"uuid": "d1731709-dfb2-4096-a9c0-6e332d6e95e5"
},
"/dev/sda": {
"fstype": "",
"label": "",
"name": "/dev/sda",
"size": "279.4G",
"type": "disk",
"uuid": ""
},
"/dev/sda1": {
"fstype": "vfat",
"label": "",
"name": "/dev/sda1",
"size": "600M",
"type": "partition",
"uuid": "E3F6-B0B3"
},
"/dev/sda2": {
"fstype": "xfs",
"label": "",
"name": "/dev/sda2",
"size": "1G",
"type": "partition",
"uuid": "02369863-9365-4c2c-a2c4-141b221fdf33"
},
"/dev/sda3": {
"fstype": "LVM2_member",
"label": "",
"name": "/dev/sda3",
"size": "277.8G",
"type": "partition",
"uuid": "XUQoSV-45yt-VtMv-SmVa-5iAe-NPRB-bkvLmD"
},
"/dev/sdb": {
"fstype": "xfs",
"label": "",
"name": "/dev/sdb",
"size": "279.4G",
"type": "disk",
"uuid": "22fd63bb-7a6a-4abd-ae93-03a803699d32"
},
"/dev/sdc": {
"fstype": "ext3",
"label": "",
"name": "/dev/sdc",
"size": "186.3G",
"type": "disk",
"uuid": "698fd066-11fb-49ee-bbd6-c196ac5776c4"
},
"/dev/sdd": {
"fstype": "ext3",
"label": "",
"name": "/dev/sdd",
"size": "111.8G",
"type": "disk",
"uuid": "ebb1ec3f-28cd-4df4-b73e-f8125892fa13"
},
"/dev/sde": {
"fstype": "xfs",
"label": "",
"name": "/dev/sde",
"size": "111.8G",
"type": "disk",
"uuid": "fb09fac2-02c3-4dc9-8fcd-9336b18a8f53"
},
"/dev/sdf": {
"fstype": "xfs",
"label": "",
"name": "/dev/sdf",
"size": "931.5G",
"type": "disk",
"uuid": "50c0a829-be65-4886-8f4d-7f750dbceea4"
},
"/dev/sdg": {
"fstype": "xfs",
"label": "",
"name": "/dev/sdg",
"size": "931.5G",
"type": "disk",
"uuid": "0028b32c-0f80-43e4-8de3-a6eb0487e43d"
},
"/dev/sdh": {
"fstype": "xfs",
"label": "",
"name": "/dev/sdh",
"size": "931.5G",
"type": "disk",
"uuid": "bc0d9a6e-58b2-4a88-8257-608835b5160c"
},
"/dev/sdi": {
"fstype": "xfs",
"label": "",
"name": "/dev/sdi",
"size": "931.5G",
"type": "disk",
"uuid": "f37a9ad7-bc49-4626-864e-a1831bb46d70"
},
"/dev/sdj": {
"fstype": "xfs",
"label": "",
"name": "/dev/sdj",
"size": "931.5G",
"type": "disk",
"uuid": "9e25c6d2-37ea-42bf-ade3-8a63622c7172"
},
"/dev/sdk": {
"fstype": "xfs",
"label": "",
"name": "/dev/sdk",
"size": "279.4G",
"type": "disk",
"uuid": "00fac5a6-cf60-4cb4-95a5-e2f0c0cad49f"
},
"/dev/sdl": {
"fstype": "",
"label": "",
"name": "/dev/sdl",
"size": "279.4G",
"type": "disk",
"uuid": ""
},
"/dev/sdl1": {
"fstype": "xfs",
"label": "",
"name": "/dev/sdl1",
"size": "279.4G",
"type": "partition",
"uuid": "8bd8c098-3eea-47f1-8551-2a2d5afd3de4"
}
},
"invocation": {
"module_args": {}
}
}

TASK [Read the /etc/fstab file for volume existence] ****************************************************************************************************************************************************
task path: /root/test/storage/tests/verify-role-results.yml:19
ESTABLISH LOCAL CONNECTION FOR USER: root
EXEC /bin/sh -c 'echo ~root && sleep 0'
EXEC /bin/sh -c '( umask 77 && mkdir -p "echo /root/.ansible/tmp"&& mkdir /root/.ansible/tmp/ansible-tmp-1596551726.173106-17863-268434075682075 && echo ansible-tmp-1596551726.173106-17863-268434075682075="echo /root/.ansible/tmp/ansible-tmp-1596551726.173106-17863-268434075682075" ) && sleep 0'
Using module file /usr/lib/python3.6/site-packages/ansible/modules/commands/command.py
PUT /root/.ansible/tmp/ansible-local-15515o7pn3a74/tmpzfc2liue TO /root/.ansible/tmp/ansible-tmp-1596551726.173106-17863-268434075682075/AnsiballZ_command.py
EXEC /bin/sh -c 'chmod u+x /root/.ansible/tmp/ansible-tmp-1596551726.173106-17863-268434075682075/ /root/.ansible/tmp/ansible-tmp-1596551726.173106-17863-268434075682075/AnsiballZ_command.py && sleep 0'
EXEC /bin/sh -c '/usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1596551726.173106-17863-268434075682075/AnsiballZ_command.py && sleep 0'
EXEC /bin/sh -c 'rm -f -r /root/.ansible/tmp/ansible-tmp-1596551726.173106-17863-268434075682075/ > /dev/null 2>&1 && sleep 0'
ok: [localhost] => {
"changed": false,
"cmd": [
"cat",
"/etc/fstab"
],
"delta": "0:00:00.003273",
"end": "2020-08-04 10:35:26.837134",
"invocation": {
"module_args": {
"_raw_params": "cat /etc/fstab",
"_uses_shell": false,
"argv": null,
"chdir": null,
"creates": null,
"executable": null,
"removes": null,
"stdin": null,
"stdin_add_newline": true,
"strip_empty_ends": true,
"warn": true
}
},
"rc": 0,
"start": "2020-08-04 10:35:26.833861",
"stderr": "",
"stderr_lines": [],
"stdout": "\n#\n# /etc/fstab\n# Created by anaconda on Tue Aug 4 14:18:46 2020\n#\n# Accessible filesystems, by reference, are maintained under '/dev/disk/'.\n# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.\n#\n# After editing this file, run 'systemctl daemon-reload' to update systemd\n# units generated from this file.\n#\n/dev/mapper/rhel_storageqe--62-root / xfs defaults 0 0\nUUID=02369863-9365-4c2c-a2c4-141b221fdf33 /boot xfs defaults 0 0\nUUID=E3F6-B0B3 /boot/efi vfat umask=0077,shortname=winnt 0 2\n/dev/mapper/rhel_storageqe--62-home /home xfs defaults 0 0\n/dev/mapper/rhel_storageqe--62-swap none swap defaults 0 0\n/dev/mapper/luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5 /opt/test1 xfs defaults 0 0",
"stdout_lines": [
"",
"#",
"# /etc/fstab",
"# Created by anaconda on Tue Aug 4 14:18:46 2020",
"#",
"# Accessible filesystems, by reference, are maintained under '/dev/disk/'.",
"# See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info.",
"#",
"# After editing this file, run 'systemctl daemon-reload' to update systemd",
"# units generated from this file.",
"#",
"/dev/mapper/rhel_storageqe--62-root / xfs defaults 0 0",
"UUID=02369863-9365-4c2c-a2c4-141b221fdf33 /boot xfs defaults 0 0",
"UUID=E3F6-B0B3 /boot/efi vfat umask=0077,shortname=winnt 0 2",
"/dev/mapper/rhel_storageqe--62-home /home xfs defaults 0 0",
"/dev/mapper/rhel_storageqe--62-swap none swap defaults 0 0",
"/dev/mapper/luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5 /opt/test1 xfs defaults 0 0"
]
}

TASK [Read the /etc/crypttab file] **********************************************************************************************************************************************************************
task path: /root/test/storage/tests/verify-role-results.yml:24
ESTABLISH LOCAL CONNECTION FOR USER: root
EXEC /bin/sh -c 'echo ~root && sleep 0'
EXEC /bin/sh -c '( umask 77 && mkdir -p "echo /root/.ansible/tmp"&& mkdir /root/.ansible/tmp/ansible-tmp-1596551726.96098-17879-164953658137720 && echo ansible-tmp-1596551726.96098-17879-164953658137720="echo /root/.ansible/tmp/ansible-tmp-1596551726.96098-17879-164953658137720" ) && sleep 0'
Using module file /usr/lib/python3.6/site-packages/ansible/modules/commands/command.py
PUT /root/.ansible/tmp/ansible-local-15515o7pn3a74/tmpkwj9hgh_ TO /root/.ansible/tmp/ansible-tmp-1596551726.96098-17879-164953658137720/AnsiballZ_command.py
EXEC /bin/sh -c 'chmod u+x /root/.ansible/tmp/ansible-tmp-1596551726.96098-17879-164953658137720/ /root/.ansible/tmp/ansible-tmp-1596551726.96098-17879-164953658137720/AnsiballZ_command.py && sleep 0'
EXEC /bin/sh -c '/usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1596551726.96098-17879-164953658137720/AnsiballZ_command.py && sleep 0'
EXEC /bin/sh -c 'rm -f -r /root/.ansible/tmp/ansible-tmp-1596551726.96098-17879-164953658137720/ > /dev/null 2>&1 && sleep 0'
ok: [localhost] => {
"changed": false,
"cmd": [
"cat",
"/etc/crypttab"
],
"delta": "0:00:00.003386",
"end": "2020-08-04 10:35:27.250945",
"invocation": {
"module_args": {
"_raw_params": "cat /etc/crypttab",
"_uses_shell": false,
"argv": null,
"chdir": null,
"creates": null,
"executable": null,
"removes": null,
"stdin": null,
"stdin_add_newline": true,
"strip_empty_ends": true,
"warn": true
}
},
"rc": 0,
"start": "2020-08-04 10:35:27.247559",
"stderr": "",
"stderr_lines": [],
"stdout": "luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5 /dev/nvme0n1p1 -",
"stdout_lines": [
"luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5 /dev/nvme0n1p1 -"
]
}

TASK [Verify the volumes listed in storage_pools were correctly managed] ********************************************************************************************************************************
task path: /root/test/storage/tests/verify-role-results.yml:32
included: /root/test/storage/tests/test-verify-pool.yml for localhost

TASK [set_fact] *****************************************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-pool.yml:5
ok: [localhost] => {
"ansible_facts": {
"_storage_pool_tests": [
"members",
"md",
"volumes"
]
},
"changed": false
}

TASK [include_tasks] ************************************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-pool.yml:17
included: /root/test/storage/tests/test-verify-pool-members.yml for localhost
included: /root/test/storage/tests/test-verify-pool-md.yml for localhost
included: /root/test/storage/tests/test-verify-pool-volumes.yml for localhost

TASK [set_fact] *****************************************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-pool-members.yml:1
skipping: [localhost] => {
"changed": false,
"skip_reason": "Conditional result was False"
}

TASK [Get the canonical device path for each member device] *********************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-pool-members.yml:7
skipping: [localhost] => {
"changed": false,
"skip_reason": "Conditional result was False"
}

TASK [set_fact] *****************************************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-pool-members.yml:16
skipping: [localhost] => {
"changed": false,
"skip_reason": "Conditional result was False"
}

TASK [Verify PV count] **********************************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-pool-members.yml:23
skipping: [localhost] => {
"changed": false,
"skip_reason": "Conditional result was False"
}

TASK [set_fact] *****************************************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-pool-members.yml:29
skipping: [localhost] => {
"changed": false,
"skip_reason": "Conditional result was False"
}

TASK [set_fact] *****************************************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-pool-members.yml:33
skipping: [localhost] => {
"changed": false,
"skip_reason": "Conditional result was False"
}

TASK [set_fact] *****************************************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-pool-members.yml:37
skipping: [localhost] => {
"changed": false,
"skip_reason": "Conditional result was False"
}

TASK [Check the type of each PV] ************************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-pool-members.yml:41
skipping: [localhost] => {
"changed": false,
"skip_reason": "Conditional result was False"
}

TASK [Check member encryption] **************************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-pool-members.yml:50
included: /root/test/storage/tests/verify-pool-members-encryption.yml for localhost

TASK [set_fact] *****************************************************************************************************************************************************************************************
task path: /root/test/storage/tests/verify-pool-members-encryption.yml:4
ok: [localhost] => {
"ansible_facts": {
"_storage_test_expected_crypttab_entries": "0",
"_storage_test_expected_crypttab_key_file": "-"
},
"changed": false
}

TASK [Validate pool member LUKS settings] ***************************************************************************************************************************************************************
task path: /root/test/storage/tests/verify-pool-members-encryption.yml:8
skipping: [localhost] => {
"changed": false,
"skip_reason": "Conditional result was False"
}

TASK [Validate pool member crypttab entries] ************************************************************************************************************************************************************
task path: /root/test/storage/tests/verify-pool-members-encryption.yml:15
skipping: [localhost] => {
"changed": false,
"skip_reason": "Conditional result was False"
}

TASK [set_fact] *****************************************************************************************************************************************************************************************
task path: /root/test/storage/tests/verify-pool-members-encryption.yml:22
ok: [localhost] => {
"ansible_facts": {
"_storage_test_crypttab_entries": null,
"_storage_test_crypttab_key_file": null
},
"changed": false
}

TASK [set_fact] *****************************************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-pool-members.yml:53
ok: [localhost] => {
"ansible_facts": {
"_storage_test_expected_pv_count": null,
"_storage_test_expected_pv_type": null,
"_storage_test_pool_pvs": [],
"_storage_test_pool_pvs_lvm": []
},
"changed": false
}

TASK [get information about RAID] ***********************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-pool-md.yml:7
skipping: [localhost] => {
"changed": false,
"skip_reason": "Conditional result was False"
}

TASK [set_fact] *****************************************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-pool-md.yml:15
skipping: [localhost] => {
"changed": false,
"skip_reason": "Conditional result was False"
}

TASK [set_fact] *****************************************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-pool-md.yml:19
skipping: [localhost] => {
"changed": false,
"skip_reason": "Conditional result was False"
}

TASK [set_fact] *****************************************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-pool-md.yml:23
skipping: [localhost] => {
"changed": false,
"skip_reason": "Conditional result was False"
}

TASK [check RAID active devices count] ******************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-pool-md.yml:27
skipping: [localhost] => {
"changed": false,
"skip_reason": "Conditional result was False"
}

TASK [check RAID spare devices count] *******************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-pool-md.yml:33
skipping: [localhost] => {
"changed": false,
"skip_reason": "Conditional result was False"
}

TASK [check RAID metadata version] **********************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-pool-md.yml:39
skipping: [localhost] => {
"changed": false,
"skip_reason": "Conditional result was False"
}

TASK [set_fact] *****************************************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-pool-md.yml:47
ok: [localhost] => {
"ansible_facts": {
"storage_test_md_active_devices_re": null,
"storage_test_md_metadata_version_re": null,
"storage_test_md_spare_devices_re": null
},
"changed": false
}

TASK [verify the volumes] *******************************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-pool-volumes.yml:3
included: /root/test/storage/tests/test-verify-volume.yml for localhost

TASK [set_fact] *****************************************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-volume.yml:2
ok: [localhost] => {
"ansible_facts": {
"_storage_test_volume_present": true,
"_storage_volume_tests": [
"mount",
"fstab",
"fs",
"device",
"encryption",
"md",
"size"
]
},
"changed": false
}

TASK [include_tasks] ************************************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-volume.yml:10
included: /root/test/storage/tests/test-verify-volume-mount.yml for localhost
included: /root/test/storage/tests/test-verify-volume-fstab.yml for localhost
included: /root/test/storage/tests/test-verify-volume-fs.yml for localhost
included: /root/test/storage/tests/test-verify-volume-device.yml for localhost
included: /root/test/storage/tests/test-verify-volume-encryption.yml for localhost
included: /root/test/storage/tests/test-verify-volume-md.yml for localhost
included: /root/test/storage/tests/test-verify-volume-size.yml for localhost

TASK [Get expected mount device based on device type] ***************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-volume-mount.yml:6
ok: [localhost] => {
"ansible_facts": {
"storage_test_device_path": "/dev/mapper/luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5"
},
"changed": false
}

TASK [Set some facts] ***********************************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-volume-mount.yml:10
ok: [localhost] => {
"ansible_facts": {
"storage_test_mount_device_matches": [
{
"block_available": 193883505,
"block_size": 4096,
"block_total": 195253095,
"block_used": 1369590,
"device": "/dev/mapper/luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5",
"fstype": "xfs",
"inode_available": 390696957,
"inode_total": 390696960,
"inode_used": 3,
"mount": "/opt/test1",
"options": "rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,noquota",
"size_available": 794146836480,
"size_total": 799756677120,
"uuid": "f1698797-4201-47e1-ac23-985ab6927e03"
}
],
"storage_test_mount_expected_match_count": "1",
"storage_test_mount_point_matches": [
{
"block_available": 193883505,
"block_size": 4096,
"block_total": 195253095,
"block_used": 1369590,
"device": "/dev/mapper/luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5",
"fstype": "xfs",
"inode_available": 390696957,
"inode_total": 390696960,
"inode_used": 3,
"mount": "/opt/test1",
"options": "rw,seclabel,relatime,attr2,inode64,logbufs=8,logbsize=32k,noquota",
"size_available": 794146836480,
"size_total": 799756677120,
"uuid": "f1698797-4201-47e1-ac23-985ab6927e03"
}
],
"storage_test_swap_expected_matches": "0"
},
"changed": false
}

TASK [Verify the current mount state by device] *********************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-volume-mount.yml:22
ok: [localhost] => {
"changed": false,
"msg": "All assertions passed"
}

TASK [Verify the current mount state by mount point] ****************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-volume-mount.yml:31
ok: [localhost] => {
"changed": false,
"msg": "All assertions passed"
}

TASK [Verify the mount fs type] *************************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-volume-mount.yml:39
ok: [localhost] => {
"changed": false,
"msg": "All assertions passed"
}

TASK [command] ******************************************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-volume-mount.yml:48
skipping: [localhost] => {
"changed": false,
"skip_reason": "Conditional result was False"
}

TASK [Gather swap info] *********************************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-volume-mount.yml:52
skipping: [localhost] => {
"changed": false,
"skip_reason": "Conditional result was False"
}

TASK [Verify swap status] *******************************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-volume-mount.yml:57
skipping: [localhost] => {
"changed": false,
"skip_reason": "Conditional result was False"
}

TASK [Unset facts] **************************************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-volume-mount.yml:67
ok: [localhost] => {
"ansible_facts": {
"storage_test_mount_device_matches": null,
"storage_test_mount_expected_match_count": null,
"storage_test_mount_point_matches": null,
"storage_test_swap_expected_matches": null,
"storage_test_swaps": null,
"storage_test_sys_node": null
},
"changed": false
}

TASK [Set some variables for fstab checking] ************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-volume-fstab.yml:2
ok: [localhost] => {
"ansible_facts": {
"storage_test_fstab_expected_id_matches": "1",
"storage_test_fstab_expected_mount_point_matches": "1",
"storage_test_fstab_id_matches": [
"/dev/mapper/luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5 "
],
"storage_test_fstab_mount_point_matches": [
" /opt/test1 "
]
},
"changed": false
}

TASK [Verify that the device identifier appears in /etc/fstab] ******************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-volume-fstab.yml:10
ok: [localhost] => {
"changed": false,
"msg": "All assertions passed"
}

TASK [Verify the fstab mount point] *********************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-volume-fstab.yml:17
ok: [localhost] => {
"changed": false,
"msg": "All assertions passed"
}

TASK [Clean up variables] *******************************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-volume-fstab.yml:24
ok: [localhost] => {
"ansible_facts": {
"storage_test_fstab_expected_id_matches": null,
"storage_test_fstab_expected_mount_point_matches": null,
"storage_test_fstab_id_matches": null,
"storage_test_fstab_mount_point_matches": null
},
"changed": false
}

TASK [Verify fs type] ***********************************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-volume-fs.yml:4
ok: [localhost] => {
"changed": false,
"msg": "All assertions passed"
}

TASK [Verify fs label] **********************************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-volume-fs.yml:10
ok: [localhost] => {
"changed": false,
"msg": "All assertions passed"
}

TASK [See whether the device node is present] ***********************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-volume-device.yml:4
ESTABLISH LOCAL CONNECTION FOR USER: root
EXEC /bin/sh -c 'echo ~root && sleep 0'
EXEC /bin/sh -c '( umask 77 && mkdir -p "echo /root/.ansible/tmp"&& mkdir /root/.ansible/tmp/ansible-tmp-1596551731.5102558-17981-146301481834338 && echo ansible-tmp-1596551731.5102558-17981-146301481834338="echo /root/.ansible/tmp/ansible-tmp-1596551731.5102558-17981-146301481834338" ) && sleep 0'
Using module file /usr/lib/python3.6/site-packages/ansible/modules/files/stat.py
PUT /root/.ansible/tmp/ansible-local-15515o7pn3a74/tmp8pgwecrb TO /root/.ansible/tmp/ansible-tmp-1596551731.5102558-17981-146301481834338/AnsiballZ_stat.py
EXEC /bin/sh -c 'chmod u+x /root/.ansible/tmp/ansible-tmp-1596551731.5102558-17981-146301481834338/ /root/.ansible/tmp/ansible-tmp-1596551731.5102558-17981-146301481834338/AnsiballZ_stat.py && sleep 0'
EXEC /bin/sh -c '/usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1596551731.5102558-17981-146301481834338/AnsiballZ_stat.py && sleep 0'
EXEC /bin/sh -c 'rm -f -r /root/.ansible/tmp/ansible-tmp-1596551731.5102558-17981-146301481834338/ > /dev/null 2>&1 && sleep 0'
ok: [localhost] => {
"changed": false,
"invocation": {
"module_args": {
"checksum_algorithm": "sha1",
"follow": true,
"get_attributes": true,
"get_checksum": true,
"get_md5": false,
"get_mime": true,
"path": "/dev/nvme0n1p1"
}
},
"stat": {
"atime": 1596551717.0222013,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 0,
"charset": "binary",
"ctime": 1596551717.0222013,
"dev": 6,
"device_type": 66306,
"executable": false,
"exists": true,
"gid": 6,
"gr_name": "disk",
"inode": 57774,
"isblk": true,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": false,
"issock": false,
"isuid": false,
"mimetype": "inode/blockdevice",
"mode": "0660",
"mtime": 1596551717.0222013,
"nlink": 1,
"path": "/dev/nvme0n1p1",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": false,
"rusr": true,
"size": 0,
"uid": 0,
"version": null,
"wgrp": true,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": false,
"xoth": false,
"xusr": false
}
}

TASK [Verify the presence/absence of the device node] ***************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-volume-device.yml:10
ok: [localhost] => {
"changed": false,
"msg": "All assertions passed"
}

TASK [Make sure we got info about this volume] **********************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-volume-device.yml:18
ok: [localhost] => {
"changed": false,
"msg": "All assertions passed"
}

TASK [(1/2) Process volume type (set initial value)] ****************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-volume-device.yml:24
ok: [localhost] => {
"ansible_facts": {
"st_volume_type": "partition"
},
"changed": false
}

TASK [(2/2) Process volume type (get RAID value)] *******************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-volume-device.yml:28
skipping: [localhost] => {
"changed": false,
"skip_reason": "Conditional result was False"
}

TASK [Verify the volume's device type] ******************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-volume-device.yml:33
ok: [localhost] => {
"changed": false,
"msg": "All assertions passed"
}

TASK [Stat the LUKS device, if encrypted] ***************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-volume-encryption.yml:3
ESTABLISH LOCAL CONNECTION FOR USER: root
EXEC /bin/sh -c 'echo ~root && sleep 0'
EXEC /bin/sh -c '( umask 77 && mkdir -p "echo /root/.ansible/tmp"&& mkdir /root/.ansible/tmp/ansible-tmp-1596551732.8201458-18008-267787763126632 && echo ansible-tmp-1596551732.8201458-18008-267787763126632="echo /root/.ansible/tmp/ansible-tmp-1596551732.8201458-18008-267787763126632" ) && sleep 0'
Using module file /usr/lib/python3.6/site-packages/ansible/modules/files/stat.py
PUT /root/.ansible/tmp/ansible-local-15515o7pn3a74/tmpvhksr38p TO /root/.ansible/tmp/ansible-tmp-1596551732.8201458-18008-267787763126632/AnsiballZ_stat.py
EXEC /bin/sh -c 'chmod u+x /root/.ansible/tmp/ansible-tmp-1596551732.8201458-18008-267787763126632/ /root/.ansible/tmp/ansible-tmp-1596551732.8201458-18008-267787763126632/AnsiballZ_stat.py && sleep 0'
EXEC /bin/sh -c '/usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1596551732.8201458-18008-267787763126632/AnsiballZ_stat.py && sleep 0'
EXEC /bin/sh -c 'rm -f -r /root/.ansible/tmp/ansible-tmp-1596551732.8201458-18008-267787763126632/ > /dev/null 2>&1 && sleep 0'
ok: [localhost] => {
"changed": false,
"invocation": {
"module_args": {
"checksum_algorithm": "sha1",
"follow": true,
"get_attributes": true,
"get_checksum": true,
"get_md5": false,
"get_mime": true,
"path": "/dev/mapper/luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5"
}
},
"stat": {
"atime": 1596551717.577207,
"attr_flags": "",
"attributes": [],
"block_size": 4096,
"blocks": 0,
"charset": "binary",
"ctime": 1596551717.577207,
"dev": 6,
"device_type": 64771,
"executable": false,
"exists": true,
"gid": 6,
"gr_name": "disk",
"inode": 57791,
"isblk": true,
"ischr": false,
"isdir": false,
"isfifo": false,
"isgid": false,
"islnk": false,
"isreg": false,
"issock": false,
"isuid": false,
"mimetype": "inode/symlink",
"mode": "0660",
"mtime": 1596551717.577207,
"nlink": 1,
"path": "/dev/mapper/luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5",
"pw_name": "root",
"readable": true,
"rgrp": true,
"roth": false,
"rusr": true,
"size": 0,
"uid": 0,
"version": null,
"wgrp": true,
"woth": false,
"writeable": true,
"wusr": true,
"xgrp": false,
"xoth": false,
"xusr": false
}
}

TASK [Collect LUKS info for this volume] ****************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-volume-encryption.yml:10
ESTABLISH LOCAL CONNECTION FOR USER: root
EXEC /bin/sh -c 'echo ~root && sleep 0'
EXEC /bin/sh -c '( umask 77 && mkdir -p "echo /root/.ansible/tmp"&& mkdir /root/.ansible/tmp/ansible-tmp-1596551733.30379-18025-182321882018665 && echo ansible-tmp-1596551733.30379-18025-182321882018665="echo /root/.ansible/tmp/ansible-tmp-1596551733.30379-18025-182321882018665" ) && sleep 0'
Using module file /usr/lib/python3.6/site-packages/ansible/modules/commands/command.py
PUT /root/.ansible/tmp/ansible-local-15515o7pn3a74/tmpz2zc3haj TO /root/.ansible/tmp/ansible-tmp-1596551733.30379-18025-182321882018665/AnsiballZ_command.py
EXEC /bin/sh -c 'chmod u+x /root/.ansible/tmp/ansible-tmp-1596551733.30379-18025-182321882018665/ /root/.ansible/tmp/ansible-tmp-1596551733.30379-18025-182321882018665/AnsiballZ_command.py && sleep 0'
EXEC /bin/sh -c '/usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1596551733.30379-18025-182321882018665/AnsiballZ_command.py && sleep 0'
EXEC /bin/sh -c 'rm -f -r /root/.ansible/tmp/ansible-tmp-1596551733.30379-18025-182321882018665/ > /dev/null 2>&1 && sleep 0'
ok: [localhost] => {
"changed": false,
"cmd": [
"cryptsetup",
"luksDump",
"/dev/nvme0n1p1"
],
"delta": "0:00:01.304160",
"end": "2020-08-04 10:35:34.930632",
"invocation": {
"module_args": {
"_raw_params": "cryptsetup luksDump /dev/nvme0n1p1",
"_uses_shell": false,
"argv": null,
"chdir": null,
"creates": null,
"executable": null,
"removes": null,
"stdin": null,
"stdin_add_newline": true,
"strip_empty_ends": true,
"warn": true
}
},
"rc": 0,
"start": "2020-08-04 10:35:33.626472",
"stderr": "",
"stderr_lines": [],
"stdout": "LUKS header information\nVersion: \t2\nEpoch: \t3\nMetadata area: \t16384 [bytes]\nKeyslots area: \t16744448 [bytes]\nUUID: \td1731709-dfb2-4096-a9c0-6e332d6e95e5\nLabel: \t(no label)\nSubsystem: \t(no subsystem)\nFlags: \t(no flags)\n\nData segments:\n 0: crypt\n\toffset: 16777216 [bytes]\n\tlength: (whole device)\n\tcipher: aes-xts-plain64\n\tsector: 512 [bytes]\n\nKeyslots:\n 0: luks2\n\tKey: 512 bits\n\tPriority: normal\n\tCipher: aes-xts-plain64\n\tCipher key: 512 bits\n\tPBKDF: argon2i\n\tTime cost: 4\n\tMemory: 840148\n\tThreads: 4\n\tSalt: 7f 81 ca 1c 66 76 a1 91 5b c0 81 48 50 31 c6 30 \n\t 63 2b 77 87 c1 3f cb 1d 9c 87 6a b2 8e a6 e0 91 \n\tAF stripes: 4000\n\tAF hash: sha256\n\tArea offset:32768 [bytes]\n\tArea length:258048 [bytes]\n\tDigest ID: 0\nTokens:\nDigests:\n 0: pbkdf2\n\tHash: sha256\n\tIterations: 44043\n\tSalt: 56 8b 9c 92 d4 0b cb 46 2a ec 5f eb fc 36 d2 ff \n\t 3c 43 7b ef d8 5f 96 64 0f c7 03 2d ff fe 55 25 \n\tDigest: ab 7b c5 64 ee 16 fa a2 74 94 d2 7e 7b b0 63 16 \n\t 02 ab 81 2e c4 d7 62 6d 1a d4 a4 a4 77 00 ef 88 ",
"stdout_lines": [
"LUKS header information",
"Version: \t2",
"Epoch: \t3",
"Metadata area: \t16384 [bytes]",
"Keyslots area: \t16744448 [bytes]",
"UUID: \td1731709-dfb2-4096-a9c0-6e332d6e95e5",
"Label: \t(no label)",
"Subsystem: \t(no subsystem)",
"Flags: \t(no flags)",
"",
"Data segments:",
" 0: crypt",
"\toffset: 16777216 [bytes]",
"\tlength: (whole device)",
"\tcipher: aes-xts-plain64",
"\tsector: 512 [bytes]",
"",
"Keyslots:",
" 0: luks2",
"\tKey: 512 bits",
"\tPriority: normal",
"\tCipher: aes-xts-plain64",
"\tCipher key: 512 bits",
"\tPBKDF: argon2i",
"\tTime cost: 4",
"\tMemory: 840148",
"\tThreads: 4",
"\tSalt: 7f 81 ca 1c 66 76 a1 91 5b c0 81 48 50 31 c6 30 ",
"\t 63 2b 77 87 c1 3f cb 1d 9c 87 6a b2 8e a6 e0 91 ",
"\tAF stripes: 4000",
"\tAF hash: sha256",
"\tArea offset:32768 [bytes]",
"\tArea length:258048 [bytes]",
"\tDigest ID: 0",
"Tokens:",
"Digests:",
" 0: pbkdf2",
"\tHash: sha256",
"\tIterations: 44043",
"\tSalt: 56 8b 9c 92 d4 0b cb 46 2a ec 5f eb fc 36 d2 ff ",
"\t 3c 43 7b ef d8 5f 96 64 0f c7 03 2d ff fe 55 25 ",
"\tDigest: ab 7b c5 64 ee 16 fa a2 74 94 d2 7e 7b b0 63 16 ",
"\t 02 ab 81 2e c4 d7 62 6d 1a d4 a4 a4 77 00 ef 88 "
]
}

TASK [Verify the presence/absence of the LUKS device node] **********************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-volume-encryption.yml:16
ok: [localhost] => {
"changed": false,
"msg": "All assertions passed"
}

TASK [Verify that the raw device is the same as the device if not encrypted] ****************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-volume-encryption.yml:25
ok: [localhost] => {
"changed": false,
"msg": "All assertions passed"
}

TASK [Make sure we got info about the LUKS volume if encrypted] *****************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-volume-encryption.yml:31
ok: [localhost] => {
"changed": false,
"msg": "All assertions passed"
}

TASK [Verify the LUKS volume's device type if encrypted] ************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-volume-encryption.yml:37
ok: [localhost] => {
"changed": false,
"msg": "All assertions passed"
}

TASK [Check LUKS version] *******************************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-volume-encryption.yml:42
skipping: [localhost] => {
"changed": false,
"skip_reason": "Conditional result was False"
}

TASK [Check LUKS key size] ******************************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-volume-encryption.yml:48
skipping: [localhost] => {
"changed": false,
"skip_reason": "Conditional result was False"
}

TASK [Check LUKS cipher] ********************************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-volume-encryption.yml:54
skipping: [localhost] => {
"changed": false,
"skip_reason": "Conditional result was False"
}

TASK [set_fact] *****************************************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-volume-encryption.yml:60
ok: [localhost] => {
"ansible_facts": {
"_storage_test_crypttab_entries": [
"luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5 /dev/nvme0n1p1 -"
],
"_storage_test_expected_crypttab_entries": "1",
"_storage_test_expected_crypttab_key_file": "-"
},
"changed": false
}

TASK [Check for /etc/crypttab entry] ********************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-volume-encryption.yml:65
ok: [localhost] => {
"changed": false,
"msg": "All assertions passed"
}

TASK [Validate the format of the crypttab entry] ********************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-volume-encryption.yml:70
ok: [localhost] => {
"changed": false,
"msg": "All assertions passed"
}

TASK [Check backing device of crypttab entry] ***********************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-volume-encryption.yml:76
ok: [localhost] => {
"changed": false,
"msg": "All assertions passed"
}

TASK [Check key file of crypttab entry] *****************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-volume-encryption.yml:82
ok: [localhost] => {
"changed": false,
"msg": "All assertions passed"
}

TASK [set_fact] *****************************************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-volume-encryption.yml:88
ok: [localhost] => {
"ansible_facts": {
"_storage_test_crypttab_entries": null,
"_storage_test_expected_crypttab_entries": null,
"_storage_test_expected_crypttab_key_file": null
},
"changed": false
}

TASK [get information about RAID] ***********************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-volume-md.yml:7
skipping: [localhost] => {
"changed": false,
"skip_reason": "Conditional result was False"
}

TASK [set_fact] *****************************************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-volume-md.yml:13
skipping: [localhost] => {
"changed": false,
"skip_reason": "Conditional result was False"
}

TASK [set_fact] *****************************************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-volume-md.yml:17
skipping: [localhost] => {
"changed": false,
"skip_reason": "Conditional result was False"
}

TASK [set_fact] *****************************************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-volume-md.yml:21
skipping: [localhost] => {
"changed": false,
"skip_reason": "Conditional result was False"
}

TASK [check RAID active devices count] ******************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-volume-md.yml:25
skipping: [localhost] => {
"changed": false,
"skip_reason": "Conditional result was False"
}

TASK [check RAID spare devices count] *******************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-volume-md.yml:31
skipping: [localhost] => {
"changed": false,
"skip_reason": "Conditional result was False"
}

TASK [check RAID metadata version] **********************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-volume-md.yml:37
skipping: [localhost] => {
"changed": false,
"skip_reason": "Conditional result was False"
}

TASK [parse the actual size of the volume] **************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-volume-size.yml:3
skipping: [localhost] => {
"changed": false,
"skip_reason": "Conditional result was False"
}

TASK [parse the requested size of the volume] ***********************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-volume-size.yml:9
skipping: [localhost] => {
"changed": false,
"skip_reason": "Conditional result was False"
}

TASK [debug] ********************************************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-volume-size.yml:15
ok: [localhost] => {
"storage_test_actual_size": {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
}

TASK [debug] ********************************************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-volume-size.yml:18
ok: [localhost] => {
"storage_test_requested_size": {
"changed": false,
"skip_reason": "Conditional result was False",
"skipped": true
}
}

TASK [assert] *******************************************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-volume-size.yml:21
skipping: [localhost] => {
"changed": false,
"skip_reason": "Conditional result was False"
}

TASK [Clean up facts] ***********************************************************************************************************************************************************************************
task path: /root/test/storage/tests/test-verify-volume.yml:16
ok: [localhost] => {
"ansible_facts": {
"_storage_test_volume_present": null
},
"changed": false
}

TASK [Clean up variable namespace] **********************************************************************************************************************************************************************
task path: /root/test/storage/tests/verify-role-results.yml:39
ok: [localhost] => {
"ansible_facts": {
"storage_test_pool": null
},
"changed": false
}

TASK [Verify the volumes with no pool were correctly managed] *******************************************************************************************************************************************
task path: /root/test/storage/tests/verify-role-results.yml:46

TASK [Clean up variable namespace] **********************************************************************************************************************************************************************
task path: /root/test/storage/tests/verify-role-results.yml:56
ok: [localhost] => {
"ansible_facts": {
"storage_test_blkinfo": null,
"storage_test_crypttab": null,
"storage_test_fstab": null,
"storage_test_volume": null
},
"changed": false
}

TASK [Remove the encryption layer] **********************************************************************************************************************************************************************
task path: /root/test/storage/tests/tests_luks.yml:40

TASK [storage : Set version specific variables] *********************************************************************************************************************************************************
task path: /root/test/storage/tasks/main.yml:2
ok: [localhost] => {
"ansible_facts": {
"blivet_package_list": [
"python3-blivet",
"libblockdev-crypto",
"libblockdev-dm",
"libblockdev-lvm",
"libblockdev-mdraid",
"libblockdev-swap"
]
},
"ansible_included_var_files": [
"/root/test/storage/vars/RedHat_8.yml"
],
"changed": false
}

TASK [storage : define an empty list of pools to be used in testing] ************************************************************************************************************************************
task path: /root/test/storage/tasks/main.yml:14
ok: [localhost] => {
"ansible_facts": {
"_storage_pools_list": []
},
"changed": false
}

TASK [storage : define an empty list of volumes to be used in testing] **********************************************************************************************************************************
task path: /root/test/storage/tasks/main.yml:18
ok: [localhost] => {
"ansible_facts": {
"_storage_volumes_list": []
},
"changed": false
}

TASK [storage : include the appropriate provider tasks] *************************************************************************************************************************************************
task path: /root/test/storage/tasks/main.yml:22
included: /root/test/storage/tasks/main-blivet.yml for localhost

TASK [storage : get a list of rpm packages installed on host machine] ***********************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:2
skipping: [localhost] => {
"changed": false,
"skip_reason": "Conditional result was False"
}

TASK [storage : make sure blivet is available] **********************************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:7
ESTABLISH LOCAL CONNECTION FOR USER: root
EXEC /bin/sh -c 'echo ~root && sleep 0'
EXEC /bin/sh -c '( umask 77 && mkdir -p "echo /root/.ansible/tmp"&& mkdir /root/.ansible/tmp/ansible-tmp-1596551738.06216-18111-12286179001058 && echo ansible-tmp-1596551738.06216-18111-12286179001058="echo /root/.ansible/tmp/ansible-tmp-1596551738.06216-18111-12286179001058" ) && sleep 0'
Using module file /usr/lib/python3.6/site-packages/ansible/modules/packaging/os/dnf.py
PUT /root/.ansible/tmp/ansible-local-15515o7pn3a74/tmp0nk6d_6q TO /root/.ansible/tmp/ansible-tmp-1596551738.06216-18111-12286179001058/AnsiballZ_dnf.py
EXEC /bin/sh -c 'chmod u+x /root/.ansible/tmp/ansible-tmp-1596551738.06216-18111-12286179001058/ /root/.ansible/tmp/ansible-tmp-1596551738.06216-18111-12286179001058/AnsiballZ_dnf.py && sleep 0'
EXEC /bin/sh -c '/usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1596551738.06216-18111-12286179001058/AnsiballZ_dnf.py && sleep 0'
EXEC /bin/sh -c 'rm -f -r /root/.ansible/tmp/ansible-tmp-1596551738.06216-18111-12286179001058/ > /dev/null 2>&1 && sleep 0'
ok: [localhost] => {
"changed": false,
"invocation": {
"module_args": {
"allow_downgrade": false,
"autoremove": false,
"bugfix": false,
"conf_file": null,
"disable_excludes": null,
"disable_gpg_check": false,
"disable_plugin": [],
"disablerepo": [],
"download_dir": null,
"download_only": false,
"enable_plugin": [],
"enablerepo": [],
"exclude": [],
"install_repoquery": true,
"install_weak_deps": true,
"installroot": "/",
"list": null,
"lock_timeout": 30,
"name": [
"python3-blivet",
"libblockdev-crypto",
"libblockdev-dm",
"libblockdev-lvm",
"libblockdev-mdraid",
"libblockdev-swap"
],
"releasever": null,
"security": false,
"skip_broken": false,
"state": "present",
"update_cache": false,
"update_only": false,
"validate_certs": true
}
},
"msg": "Nothing to do",
"rc": 0,
"results": []
}

TASK [storage : initialize internal facts] **************************************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:18
ok: [localhost] => {
"ansible_facts": {
"_storage_pools": [],
"_storage_vol_defaults": [],
"_storage_vol_pools": [],
"_storage_vols_no_defaults": [],
"_storage_vols_no_defaults_by_pool": {},
"_storage_vols_w_defaults": [],
"_storage_volumes": []
},
"changed": false
}

TASK [storage : Apply defaults to pools and volumes [1/6]] **********************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:28
ok: [localhost] => (item={'name': 'foo', 'type': 'partition', 'disks': ['nvme0n1'], 'volumes': [{'name': 'test1', 'type': 'partition', 'mount_point': '/opt/test1', 'encryption': False, 'encryption_passphrase': 'yabbadabbadoo'}]}) => {
"ansible_facts": {
"_storage_pools": [
{
"disks": [
"nvme0n1"
],
"encryption": false,
"encryption_cipher": null,
"encryption_key_file": null,
"encryption_key_size": null,
"encryption_luks_version": null,
"encryption_passphrase": null,
"name": "foo",
"raid_chunk_size": null,
"raid_device_count": null,
"raid_level": null,
"raid_metadata_version": null,
"raid_spare_count": null,
"state": "present",
"type": "partition",
"volumes": [
{
"encryption": false,
"encryption_passphrase": "yabbadabbadoo",
"mount_point": "/opt/test1",
"name": "test1",
"type": "partition"
}
]
}
]
},
"ansible_loop_var": "pool",
"changed": false,
"pool": {
"disks": [
"nvme0n1"
],
"name": "foo",
"type": "partition",
"volumes": [
{
"encryption": false,
"encryption_passphrase": "yabbadabbadoo",
"mount_point": "/opt/test1",
"name": "test1",
"type": "partition"
}
]
}
}

TASK [storage : Apply defaults to pools and volumes [2/6]] **********************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:36
ok: [localhost] => (item=[{'state': 'present', 'type': 'partition', 'encryption': False, 'encryption_passphrase': None, 'encryption_key_file': None, 'encryption_cipher': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, 'name': 'foo', 'disks': ['nvme0n1'], 'volumes': [{'name': 'test1', 'type': 'partition', 'mount_point': '/opt/test1', 'encryption': False, 'encryption_passphrase': 'yabbadabbadoo'}]}, {'name': 'test1', 'type': 'partition', 'mount_point': '/opt/test1', 'encryption': False, 'encryption_passphrase': 'yabbadabbadoo'}]) => {
"ansible_facts": {
"_storage_vol_defaults": [
{
"encryption": false,
"encryption_cipher": null,
"encryption_key_file": null,
"encryption_key_size": null,
"encryption_luks_version": null,
"encryption_passphrase": null,
"fs_create_options": "",
"fs_label": "",
"fs_overwrite_existing": true,
"fs_type": "xfs",
"mount_check": 0,
"mount_device_identifier": "uuid",
"mount_options": "defaults",
"mount_passno": 0,
"mount_point": "",
"raid_chunk_size": null,
"raid_device_count": null,
"raid_level": null,
"raid_metadata_version": null,
"raid_spare_count": null,
"size": 0,
"state": "present",
"type": "lvm"
}
],
"_storage_vol_pools": [
"foo"
],
"_storage_vols_no_defaults": [
{
"encryption": false,
"encryption_passphrase": "yabbadabbadoo",
"mount_point": "/opt/test1",
"name": "test1",
"type": "partition"
}
]
},
"ansible_loop_var": "item",
"changed": false,
"item": [
{
"disks": [
"nvme0n1"
],
"encryption": false,
"encryption_cipher": null,
"encryption_key_file": null,
"encryption_key_size": null,
"encryption_luks_version": null,
"encryption_passphrase": null,
"name": "foo",
"raid_chunk_size": null,
"raid_device_count": null,
"raid_level": null,
"raid_metadata_version": null,
"raid_spare_count": null,
"state": "present",
"type": "partition",
"volumes": [
{
"encryption": false,
"encryption_passphrase": "yabbadabbadoo",
"mount_point": "/opt/test1",
"name": "test1",
"type": "partition"
}
]
},
{
"encryption": false,
"encryption_passphrase": "yabbadabbadoo",
"mount_point": "/opt/test1",
"name": "test1",
"type": "partition"
}
]
}

TASK [storage : Apply defaults to pools and volumes [3/6]] **********************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:44
ok: [localhost] => (item=[{'name': 'test1', 'type': 'partition', 'mount_point': '/opt/test1', 'encryption': False, 'encryption_passphrase': 'yabbadabbadoo'}, {'state': 'present', 'type': 'lvm', 'size': 0, 'fs_type': 'xfs', 'fs_label': '', 'fs_create_options': '', 'fs_overwrite_existing': True, 'mount_point': '', 'mount_options': 'defaults', 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, 'encryption': False, 'encryption_passphrase': None, 'encryption_key_file': None, 'encryption_cipher': None, 'encryption_key_size': None, 'encryption_luks_version': None}]) => {
"ansible_facts": {
"_storage_vols_w_defaults": [
{
"encryption": false,
"encryption_cipher": null,
"encryption_key_file": null,
"encryption_key_size": null,
"encryption_luks_version": null,
"encryption_passphrase": "yabbadabbadoo",
"fs_create_options": "",
"fs_label": "",
"fs_overwrite_existing": true,
"fs_type": "xfs",
"mount_check": 0,
"mount_device_identifier": "uuid",
"mount_options": "defaults",
"mount_passno": 0,
"mount_point": "/opt/test1",
"name": "test1",
"pool": "foo",
"raid_chunk_size": null,
"raid_device_count": null,
"raid_level": null,
"raid_metadata_version": null,
"raid_spare_count": null,
"size": 0,
"state": "present",
"type": "partition"
}
]
},
"ansible_index_var": "idx",
"ansible_loop_var": "item",
"changed": false,
"idx": 0,
"item": [
{
"encryption": false,
"encryption_passphrase": "yabbadabbadoo",
"mount_point": "/opt/test1",
"name": "test1",
"type": "partition"
},
{
"encryption": false,
"encryption_cipher": null,
"encryption_key_file": null,
"encryption_key_size": null,
"encryption_luks_version": null,
"encryption_passphrase": null,
"fs_create_options": "",
"fs_label": "",
"fs_overwrite_existing": true,
"fs_type": "xfs",
"mount_check": 0,
"mount_device_identifier": "uuid",
"mount_options": "defaults",
"mount_passno": 0,
"mount_point": "",
"raid_chunk_size": null,
"raid_device_count": null,
"raid_level": null,
"raid_metadata_version": null,
"raid_spare_count": null,
"size": 0,
"state": "present",
"type": "lvm"
}
]
}

TASK [storage : Apply defaults to pools and volumes [4/6]] **********************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:52
ok: [localhost] => (item={'state': 'present', 'type': 'partition', 'encryption': False, 'encryption_passphrase': None, 'encryption_key_file': None, 'encryption_cipher': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, 'name': 'foo', 'disks': ['nvme0n1'], 'volumes': [{'name': 'test1', 'type': 'partition', 'mount_point': '/opt/test1', 'encryption': False, 'encryption_passphrase': 'yabbadabbadoo'}]}) => {
"ansible_facts": {
"_storage_vols_no_defaults_by_pool": {
"foo": [
{
"encryption": false,
"encryption_cipher": null,
"encryption_key_file": null,
"encryption_key_size": null,
"encryption_luks_version": null,
"encryption_passphrase": "yabbadabbadoo",
"fs_create_options": "",
"fs_label": "",
"fs_overwrite_existing": true,
"fs_type": "xfs",
"mount_check": 0,
"mount_device_identifier": "uuid",
"mount_options": "defaults",
"mount_passno": 0,
"mount_point": "/opt/test1",
"name": "test1",
"pool": "foo",
"raid_chunk_size": null,
"raid_device_count": null,
"raid_level": null,
"raid_metadata_version": null,
"raid_spare_count": null,
"size": 0,
"state": "present",
"type": "partition"
}
]
}
},
"ansible_loop_var": "item",
"changed": false,
"item": {
"disks": [
"nvme0n1"
],
"encryption": false,
"encryption_cipher": null,
"encryption_key_file": null,
"encryption_key_size": null,
"encryption_luks_version": null,
"encryption_passphrase": null,
"name": "foo",
"raid_chunk_size": null,
"raid_device_count": null,
"raid_level": null,
"raid_metadata_version": null,
"raid_spare_count": null,
"state": "present",
"type": "partition",
"volumes": [
{
"encryption": false,
"encryption_passphrase": "yabbadabbadoo",
"mount_point": "/opt/test1",
"name": "test1",
"type": "partition"
}
]
}
}

TASK [storage : Apply defaults to pools and volumes [5/6]] **********************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:61
ok: [localhost] => (item={'state': 'present', 'type': 'partition', 'encryption': False, 'encryption_passphrase': None, 'encryption_key_file': None, 'encryption_cipher': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, 'name': 'foo', 'disks': ['nvme0n1'], 'volumes': [{'name': 'test1', 'type': 'partition', 'mount_point': '/opt/test1', 'encryption': False, 'encryption_passphrase': 'yabbadabbadoo'}]}) => {
"ansible_facts": {
"_storage_pools": [
{
"disks": [
"nvme0n1"
],
"encryption": false,
"encryption_cipher": null,
"encryption_key_file": null,
"encryption_key_size": null,
"encryption_luks_version": null,
"encryption_passphrase": null,
"name": "foo",
"raid_chunk_size": null,
"raid_device_count": null,
"raid_level": null,
"raid_metadata_version": null,
"raid_spare_count": null,
"state": "present",
"type": "partition",
"volumes": [
{
"encryption": false,
"encryption_cipher": null,
"encryption_key_file": null,
"encryption_key_size": null,
"encryption_luks_version": null,
"encryption_passphrase": "yabbadabbadoo",
"fs_create_options": "",
"fs_label": "",
"fs_overwrite_existing": true,
"fs_type": "xfs",
"mount_check": 0,
"mount_device_identifier": "uuid",
"mount_options": "defaults",
"mount_passno": 0,
"mount_point": "/opt/test1",
"name": "test1",
"pool": "foo",
"raid_chunk_size": null,
"raid_device_count": null,
"raid_level": null,
"raid_metadata_version": null,
"raid_spare_count": null,
"size": 0,
"state": "present",
"type": "partition"
}
]
}
]
},
"ansible_index_var": "idx",
"ansible_loop_var": "pool",
"changed": false,
"idx": 0,
"pool": {
"disks": [
"nvme0n1"
],
"encryption": false,
"encryption_cipher": null,
"encryption_key_file": null,
"encryption_key_size": null,
"encryption_luks_version": null,
"encryption_passphrase": null,
"name": "foo",
"raid_chunk_size": null,
"raid_device_count": null,
"raid_level": null,
"raid_metadata_version": null,
"raid_spare_count": null,
"state": "present",
"type": "partition",
"volumes": [
{
"encryption": false,
"encryption_passphrase": "yabbadabbadoo",
"mount_point": "/opt/test1",
"name": "test1",
"type": "partition"
}
]
}
}

TASK [storage : Apply defaults to pools and volumes [6/6]] **********************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:72

TASK [storage : debug] **********************************************************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:84
ok: [localhost] => {
"_storage_pools": [
{
"disks": [
"nvme0n1"
],
"encryption": false,
"encryption_cipher": null,
"encryption_key_file": null,
"encryption_key_size": null,
"encryption_luks_version": null,
"encryption_passphrase": null,
"name": "foo",
"raid_chunk_size": null,
"raid_device_count": null,
"raid_level": null,
"raid_metadata_version": null,
"raid_spare_count": null,
"state": "present",
"type": "partition",
"volumes": [
{
"encryption": false,
"encryption_cipher": null,
"encryption_key_file": null,
"encryption_key_size": null,
"encryption_luks_version": null,
"encryption_passphrase": "yabbadabbadoo",
"fs_create_options": "",
"fs_label": "",
"fs_overwrite_existing": true,
"fs_type": "xfs",
"mount_check": 0,
"mount_device_identifier": "uuid",
"mount_options": "defaults",
"mount_passno": 0,
"mount_point": "/opt/test1",
"name": "test1",
"pool": "foo",
"raid_chunk_size": null,
"raid_device_count": null,
"raid_level": null,
"raid_metadata_version": null,
"raid_spare_count": null,
"size": 0,
"state": "present",
"type": "partition"
}
]
}
]
}

TASK [storage : debug] **********************************************************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:87
ok: [localhost] => {
"_storage_volumes": []
}

TASK [storage : get required packages] ******************************************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:90
ESTABLISH LOCAL CONNECTION FOR USER: root
EXEC /bin/sh -c 'echo ~root && sleep 0'
EXEC /bin/sh -c '( umask 77 && mkdir -p "echo /root/.ansible/tmp"&& mkdir /root/.ansible/tmp/ansible-tmp-1596551742.6613111-18145-68523253749948 && echo ansible-tmp-1596551742.6613111-18145-68523253749948="echo /root/.ansible/tmp/ansible-tmp-1596551742.6613111-18145-68523253749948" ) && sleep 0'
Using module file /root/test/storage/library/blivet.py
PUT /root/.ansible/tmp/ansible-local-15515o7pn3a74/tmp6b23aekf TO /root/.ansible/tmp/ansible-tmp-1596551742.6613111-18145-68523253749948/AnsiballZ_blivet.py
EXEC /bin/sh -c 'chmod u+x /root/.ansible/tmp/ansible-tmp-1596551742.6613111-18145-68523253749948/ /root/.ansible/tmp/ansible-tmp-1596551742.6613111-18145-68523253749948/AnsiballZ_blivet.py && sleep 0'
EXEC /bin/sh -c '/usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1596551742.6613111-18145-68523253749948/AnsiballZ_blivet.py && sleep 0'
EXEC /bin/sh -c 'rm -f -r /root/.ansible/tmp/ansible-tmp-1596551742.6613111-18145-68523253749948/ > /dev/null 2>&1 && sleep 0'
ok: [localhost] => {
"actions": [],
"changed": false,
"crypts": [],
"invocation": {
"module_args": {
"disklabel_type": null,
"packages_only": true,
"pools": [
{
"disks": [
"nvme0n1"
],
"encryption": false,
"encryption_cipher": null,
"encryption_key_file": null,
"encryption_key_size": null,
"encryption_luks_version": null,
"encryption_passphrase": null,
"name": "foo",
"raid_chunk_size": null,
"raid_device_count": null,
"raid_level": null,
"raid_metadata_version": null,
"raid_spare_count": null,
"state": "present",
"type": "partition",
"volumes": [
{
"encryption": false,
"encryption_cipher": null,
"encryption_key_file": null,
"encryption_key_size": null,
"encryption_luks_version": null,
"encryption_passphrase": "yabbadabbadoo",
"fs_create_options": "",
"fs_label": "",
"fs_overwrite_existing": true,
"fs_type": "xfs",
"mount_check": 0,
"mount_device_identifier": "uuid",
"mount_options": "defaults",
"mount_passno": 0,
"mount_point": "/opt/test1",
"name": "test1",
"pool": "foo",
"raid_chunk_size": null,
"raid_device_count": null,
"raid_level": null,
"raid_metadata_version": null,
"raid_spare_count": null,
"size": 0,
"state": "present",
"type": "partition"
}
]
}
],
"safe_mode": true,
"use_partitions": null,
"volumes": []
}
},
"leaves": [],
"mounts": [],
"packages": [
"xfsprogs"
],
"pools": [],
"volumes": []
}

TASK [storage : make sure required packages are installed] **********************************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:99
ESTABLISH LOCAL CONNECTION FOR USER: root
EXEC /bin/sh -c 'echo ~root && sleep 0'
EXEC /bin/sh -c '( umask 77 && mkdir -p "echo /root/.ansible/tmp"&& mkdir /root/.ansible/tmp/ansible-tmp-1596551746.6839507-18212-14130022943421 && echo ansible-tmp-1596551746.6839507-18212-14130022943421="echo /root/.ansible/tmp/ansible-tmp-1596551746.6839507-18212-14130022943421" ) && sleep 0'
Using module file /usr/lib/python3.6/site-packages/ansible/modules/packaging/os/dnf.py
PUT /root/.ansible/tmp/ansible-local-15515o7pn3a74/tmpg04gg6cv TO /root/.ansible/tmp/ansible-tmp-1596551746.6839507-18212-14130022943421/AnsiballZ_dnf.py
EXEC /bin/sh -c 'chmod u+x /root/.ansible/tmp/ansible-tmp-1596551746.6839507-18212-14130022943421/ /root/.ansible/tmp/ansible-tmp-1596551746.6839507-18212-14130022943421/AnsiballZ_dnf.py && sleep 0'
EXEC /bin/sh -c '/usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1596551746.6839507-18212-14130022943421/AnsiballZ_dnf.py && sleep 0'
EXEC /bin/sh -c 'rm -f -r /root/.ansible/tmp/ansible-tmp-1596551746.6839507-18212-14130022943421/ > /dev/null 2>&1 && sleep 0'
ok: [localhost] => {
"changed": false,
"invocation": {
"module_args": {
"allow_downgrade": false,
"autoremove": false,
"bugfix": false,
"conf_file": null,
"disable_excludes": null,
"disable_gpg_check": false,
"disable_plugin": [],
"disablerepo": [],
"download_dir": null,
"download_only": false,
"enable_plugin": [],
"enablerepo": [],
"exclude": [],
"install_repoquery": true,
"install_weak_deps": true,
"installroot": "/",
"list": null,
"lock_timeout": 30,
"name": [
"xfsprogs"
],
"releasever": null,
"security": false,
"skip_broken": false,
"state": "present",
"update_cache": false,
"update_only": false,
"validate_certs": true
}
},
"msg": "Nothing to do",
"rc": 0,
"results": []
}

TASK [storage : manage the pools and volumes to match the specified state] ******************************************************************************************************************************
task path: /root/test/storage/tasks/main-blivet.yml:104
ESTABLISH LOCAL CONNECTION FOR USER: root
EXEC /bin/sh -c 'echo ~root && sleep 0'
EXEC /bin/sh -c '( umask 77 && mkdir -p "echo /root/.ansible/tmp"&& mkdir /root/.ansible/tmp/ansible-tmp-1596551750.5523744-18228-104064955521611 && echo ansible-tmp-1596551750.5523744-18228-104064955521611="echo /root/.ansible/tmp/ansible-tmp-1596551750.5523744-18228-104064955521611" ) && sleep 0'
Using module file /root/test/storage/library/blivet.py
PUT /root/.ansible/tmp/ansible-local-15515o7pn3a74/tmpljru2714 TO /root/.ansible/tmp/ansible-tmp-1596551750.5523744-18228-104064955521611/AnsiballZ_blivet.py
EXEC /bin/sh -c 'chmod u+x /root/.ansible/tmp/ansible-tmp-1596551750.5523744-18228-104064955521611/ /root/.ansible/tmp/ansible-tmp-1596551750.5523744-18228-104064955521611/AnsiballZ_blivet.py && sleep 0'
EXEC /bin/sh -c '/usr/libexec/platform-python /root/.ansible/tmp/ansible-tmp-1596551750.5523744-18228-104064955521611/AnsiballZ_blivet.py && sleep 0'
EXEC /bin/sh -c 'rm -f -r /root/.ansible/tmp/ansible-tmp-1596551750.5523744-18228-104064955521611/ > /dev/null 2>&1 && sleep 0'
The full traceback is:
File "/tmp/ansible_blivet_payload__mpl_uj7/ansible_blivet_payload.zip/ansible/modules/blivet.py", line 1161, in run_module
File "/tmp/ansible_blivet_payload__mpl_uj7/ansible_blivet_payload.zip/ansible/modules/blivet.py", line 898, in manage_pool
File "/tmp/ansible_blivet_payload__mpl_uj7/ansible_blivet_payload.zip/ansible/modules/blivet.py", line 804, in manage
File "/tmp/ansible_blivet_payload__mpl_uj7/ansible_blivet_payload.zip/ansible/modules/blivet.py", line 786, in _manage_volumes
File "/tmp/ansible_blivet_payload__mpl_uj7/ansible_blivet_payload.zip/ansible/modules/blivet.py", line 403, in manage
File "/tmp/ansible_blivet_payload__mpl_uj7/ansible_blivet_payload.zip/ansible/modules/blivet.py", line 479, in _create
fatal: [localhost]: FAILED! => {
"actions": [],
"changed": false,
"crypts": [],
"invocation": {
"module_args": {
"disklabel_type": null,
"packages_only": false,
"pools": [
{
"disks": [
"nvme0n1"
],
"encryption": false,
"encryption_cipher": null,
"encryption_key_file": null,
"encryption_key_size": null,
"encryption_luks_version": null,
"encryption_passphrase": null,
"name": "foo",
"raid_chunk_size": null,
"raid_device_count": null,
"raid_level": null,
"raid_metadata_version": null,
"raid_spare_count": null,
"state": "present",
"type": "partition",
"volumes": [
{
"encryption": false,
"encryption_cipher": null,
"encryption_key_file": null,
"encryption_key_size": null,
"encryption_luks_version": null,
"encryption_passphrase": "yabbadabbadoo",
"fs_create_options": "",
"fs_label": "",
"fs_overwrite_existing": true,
"fs_type": "xfs",
"mount_check": 0,
"mount_device_identifier": "uuid",
"mount_options": "defaults",
"mount_passno": 0,
"mount_point": "/opt/test1",
"name": "test1",
"pool": "foo",
"raid_chunk_size": null,
"raid_device_count": null,
"raid_level": null,
"raid_metadata_version": null,
"raid_spare_count": null,
"size": 0,
"state": "present",
"type": "partition"
}
]
}
],
"safe_mode": false,
"use_partitions": null,
"volumes": []
}
},
"leaves": [],
"mounts": [],
"msg": "partition allocation failed for volume 'test1'",
"packages": [],
"pools": [],
"volumes": []
}

PLAY RECAP **********************************************************************************************************************************************************************************************
localhost : ok=117 changed=5 unreachable=0 failed=1 skipped=54 rescued=0 ignored=0

/tmp/blivet.log

# cat /tmp/blivet.log | tail -300

  PVs = ['existing 277.81 GiB partition sda3 (30) with existing lvmpv']
  LVs = ['existing 199.97 GiB lvmlv rhel_storageqe-62-home (43) with existing xfs '
 'filesystem',
 'existing 70 GiB lvmlv rhel_storageqe-62-root (56) with existing xfs '
 'filesystem',
 'existing 7.84 GiB lvmlv rhel_storageqe-62-swap (69) with existing swap']
  segment type = linear percent = 0
  VG space used = 70 GiB
2020-08-04 10:35:53,708 INFO program/MainThread: Running [9] dmsetup info -co subsystem --noheadings rhel_storageqe--62-root ...
2020-08-04 10:35:53,714 INFO program/MainThread: stdout[9]: LVM

2020-08-04 10:35:53,715 INFO program/MainThread: stderr[9]:
2020-08-04 10:35:53,715 INFO program/MainThread: ...done [9] (exit code: 0)
2020-08-04 10:35:53,719 DEBUG blivet/MainThread: DeviceTree.handle_format: name: rhel_storageqe-62-root ;
2020-08-04 10:35:53,719 DEBUG blivet/MainThread: no type or existing type for rhel_storageqe--62-root, bailing
2020-08-04 10:35:53,722 DEBUG blivet/MainThread: DeviceTree.handle_device: name: rhel_storageqe--62-swap ; info: {'DEVLINKS': '/dev/disk/by-id/dm-uuid-LVM-DrL6pCi2vQjtrIPEXfDd43GVwv6yUXdwM5iHQzFnhGKLdXhcxFch2QeRkI3VKiNr '
'/dev/rhel_storageqe-62/swap '
'/dev/disk/by-id/dm-name-rhel_storageqe--62-swap '
'/dev/disk/by-uuid/aeaa2293-343b-4399-afa5-7d2ceafac06e '
'/dev/mapper/rhel_storageqe--62-swap',
'DEVNAME': '/dev/dm-1',
'DEVPATH': '/devices/virtual/block/dm-1',
'DEVTYPE': 'disk',
'DM_ACTIVATION': '1',
'DM_LV_NAME': 'swap',
'DM_NAME': 'rhel_storageqe--62-swap',
'DM_SUSPENDED': '0',
'DM_UDEV_DISABLE_LIBRARY_FALLBACK_FLAG': '1',
'DM_UDEV_PRIMARY_SOURCE_FLAG': '1',
'DM_UDEV_RULES_VSN': '2',
'DM_UUID': 'LVM-DrL6pCi2vQjtrIPEXfDd43GVwv6yUXdwM5iHQzFnhGKLdXhcxFch2QeRkI3VKiNr',
'DM_VG_NAME': 'rhel_storageqe-62',
'ID_FS_TYPE': 'swap',
'ID_FS_USAGE': 'other',
'ID_FS_UUID': 'aeaa2293-343b-4399-afa5-7d2ceafac06e',
'ID_FS_UUID_ENC': 'aeaa2293-343b-4399-afa5-7d2ceafac06e',
'ID_FS_VERSION': '1',
'MAJOR': '253',
'MINOR': '1',
'SUBSYSTEM': 'block',
'SYS_NAME': 'dm-1',
'SYS_PATH': '/sys/devices/virtual/block/dm-1',
'TAGS': ':systemd:',
'USEC_INITIALIZED': '8032184'} ;
2020-08-04 10:35:53,723 INFO blivet/MainThread: scanning rhel_storageqe--62-swap (/sys/devices/virtual/block/dm-1)...
2020-08-04 10:35:53,727 DEBUG blivet/MainThread: DeviceTree.get_device_by_name: name: rhel_storageqe--62-swap ; incomplete: False ; hidden: False ;
2020-08-04 10:35:53,730 DEBUG blivet/MainThread: DeviceTree.get_device_by_name returned existing 7.84 GiB lvmlv rhel_storageqe-62-swap (69) with existing swap
2020-08-04 10:35:53,735 DEBUG blivet/MainThread: rhel_storageqe-62 size is 277.81 GiB
2020-08-04 10:35:53,737 DEBUG blivet/MainThread: vg rhel_storageqe-62 has 0 B free
2020-08-04 10:35:53,738 DEBUG blivet/MainThread: rhel_storageqe-62 size is 277.81 GiB
2020-08-04 10:35:53,740 DEBUG blivet/MainThread: vg rhel_storageqe-62 has 0 B free
2020-08-04 10:35:53,730 INFO blivet/MainThread: got device: LVMLogicalVolumeDevice instance (0x7f744a217748) --
name = rhel_storageqe-62-swap status = True id = 69
children = []
parents = ['existing 277.81 GiB lvmvg rhel_storageqe-62 (39)']
uuid = M5iHQz-FnhG-KLdX-hcxF-ch2Q-eRkI-3VKiNr size = 7.84 GiB
format = existing swap
major = 0 minor = 0 exists = True protected = False
sysfs path = /sys/devices/virtual/block/dm-1
target size = 7.84 GiB path = /dev/mapper/rhel_storageqe--62-swap
format args = [] original_format = swap target = None dm_uuid = None VG device = LVMVolumeGroupDevice instance (0x7f744a2b9198) --
name = rhel_storageqe-62 status = True id = 39
children = ['existing 199.97 GiB lvmlv rhel_storageqe-62-home (43) with existing xfs '
'filesystem',
'existing 70 GiB lvmlv rhel_storageqe-62-root (56) with existing xfs '
'filesystem',
'existing 7.84 GiB lvmlv rhel_storageqe-62-swap (69) with existing swap']
parents = ['existing 277.81 GiB partition sda3 (30) with existing lvmpv']
uuid = DrL6pC-i2vQ-jtrI-PEXf-Dd43-GVwv-6yUXdw size = 277.81 GiB
format = existing None
major = 0 minor = 0 exists = True protected = False
sysfs path =
target size = 277.81 GiB path = /dev/rhel_storageqe--62
format args = [] original_format = None free = 0 B PE Size = 4 MiB PE Count = 71119
PE Free = 0 PV Count = 1
modified = False extents = 71119 free space = 0 B
free extents = 0 reserved percent = 0 reserved space = 0 B
PVs = ['existing 277.81 GiB partition sda3 (30) with existing lvmpv']
LVs = ['existing 199.97 GiB lvmlv rhel_storageqe-62-home (43) with existing xfs '
'filesystem',
'existing 70 GiB lvmlv rhel_storageqe-62-root (56) with existing xfs '
'filesystem',
'existing 7.84 GiB lvmlv rhel_storageqe-62-swap (69) with existing swap']
segment type = linear percent = 0
VG space used = 7.84 GiB
2020-08-04 10:35:53,741 INFO program/MainThread: Running [10] dmsetup info -co subsystem --noheadings rhel_storageqe--62-swap ...
2020-08-04 10:35:53,747 INFO program/MainThread: stdout[10]: LVM

2020-08-04 10:35:53,747 INFO program/MainThread: stderr[10]:
2020-08-04 10:35:53,747 INFO program/MainThread: ...done [10] (exit code: 0)
2020-08-04 10:35:53,751 DEBUG blivet/MainThread: DeviceTree.handle_format: name: rhel_storageqe-62-swap ;
2020-08-04 10:35:53,751 DEBUG blivet/MainThread: no type or existing type for rhel_storageqe--62-swap, bailing
2020-08-04 10:35:53,755 DEBUG blivet/MainThread: DeviceTree.handle_device: name: rhel_storageqe--62-home ; info: {'DEVLINKS': '/dev/rhel_storageqe-62/home '
'/dev/disk/by-id/dm-name-rhel_storageqe--62-home '
'/dev/mapper/rhel_storageqe--62-home '
'/dev/disk/by-uuid/25082e54-84e7-4945-87a4-532894d69113 '
'/dev/disk/by-id/dm-uuid-LVM-DrL6pCi2vQjtrIPEXfDd43GVwv6yUXdwN0C3KvQUeXpTjzOZ9P1blbg5bVV4tzxT',
'DEVNAME': '/dev/dm-2',
'DEVPATH': '/devices/virtual/block/dm-2',
'DEVTYPE': 'disk',
'DM_ACTIVATION': '1',
'DM_LV_NAME': 'home',
'DM_NAME': 'rhel_storageqe--62-home',
'DM_SUSPENDED': '0',
'DM_UDEV_DISABLE_LIBRARY_FALLBACK_FLAG': '1',
'DM_UDEV_PRIMARY_SOURCE_FLAG': '1',
'DM_UDEV_RULES_VSN': '2',
'DM_UUID': 'LVM-DrL6pCi2vQjtrIPEXfDd43GVwv6yUXdwN0C3KvQUeXpTjzOZ9P1blbg5bVV4tzxT',
'DM_VG_NAME': 'rhel_storageqe-62',
'ID_FS_TYPE': 'xfs',
'ID_FS_USAGE': 'filesystem',
'ID_FS_UUID': '25082e54-84e7-4945-87a4-532894d69113',
'ID_FS_UUID_ENC': '25082e54-84e7-4945-87a4-532894d69113',
'MAJOR': '253',
'MINOR': '2',
'SUBSYSTEM': 'block',
'SYS_NAME': 'dm-2',
'SYS_PATH': '/sys/devices/virtual/block/dm-2',
'TAGS': ':systemd:',
'USEC_INITIALIZED': '17451740'} ;
2020-08-04 10:35:53,755 INFO blivet/MainThread: scanning rhel_storageqe--62-home (/sys/devices/virtual/block/dm-2)...
2020-08-04 10:35:53,759 DEBUG blivet/MainThread: DeviceTree.get_device_by_name: name: rhel_storageqe--62-home ; incomplete: False ; hidden: False ;
2020-08-04 10:35:53,762 DEBUG blivet/MainThread: DeviceTree.get_device_by_name returned existing 199.97 GiB lvmlv rhel_storageqe-62-home (43) with existing xfs filesystem
2020-08-04 10:35:53,767 DEBUG blivet/MainThread: rhel_storageqe-62 size is 277.81 GiB
2020-08-04 10:35:53,769 DEBUG blivet/MainThread: vg rhel_storageqe-62 has 0 B free
2020-08-04 10:35:53,770 DEBUG blivet/MainThread: rhel_storageqe-62 size is 277.81 GiB
2020-08-04 10:35:53,772 DEBUG blivet/MainThread: vg rhel_storageqe-62 has 0 B free
2020-08-04 10:35:53,762 INFO blivet/MainThread: got device: LVMLogicalVolumeDevice instance (0x7f744a2b9ac8) --
name = rhel_storageqe-62-home status = True id = 43
children = []
parents = ['existing 277.81 GiB lvmvg rhel_storageqe-62 (39)']
uuid = N0C3Kv-QUeX-pTjz-OZ9P-1blb-g5bV-V4tzxT size = 199.97 GiB
format = existing xfs filesystem
major = 0 minor = 0 exists = True protected = False
sysfs path = /sys/devices/virtual/block/dm-2
target size = 199.97 GiB path = /dev/mapper/rhel_storageqe--62-home
format args = [] original_format = xfs target = None dm_uuid = None VG device = LVMVolumeGroupDevice instance (0x7f744a2b9198) --
name = rhel_storageqe-62 status = True id = 39
children = ['existing 199.97 GiB lvmlv rhel_storageqe-62-home (43) with existing xfs '
'filesystem',
'existing 70 GiB lvmlv rhel_storageqe-62-root (56) with existing xfs '
'filesystem',
'existing 7.84 GiB lvmlv rhel_storageqe-62-swap (69) with existing swap']
parents = ['existing 277.81 GiB partition sda3 (30) with existing lvmpv']
uuid = DrL6pC-i2vQ-jtrI-PEXf-Dd43-GVwv-6yUXdw size = 277.81 GiB
format = existing None
major = 0 minor = 0 exists = True protected = False
sysfs path =
target size = 277.81 GiB path = /dev/rhel_storageqe--62
format args = [] original_format = None free = 0 B PE Size = 4 MiB PE Count = 71119
PE Free = 0 PV Count = 1
modified = False extents = 71119 free space = 0 B
free extents = 0 reserved percent = 0 reserved space = 0 B
PVs = ['existing 277.81 GiB partition sda3 (30) with existing lvmpv']
LVs = ['existing 199.97 GiB lvmlv rhel_storageqe-62-home (43) with existing xfs '
'filesystem',
'existing 70 GiB lvmlv rhel_storageqe-62-root (56) with existing xfs '
'filesystem',
'existing 7.84 GiB lvmlv rhel_storageqe-62-swap (69) with existing swap']
segment type = linear percent = 0
VG space used = 199.97 GiB
2020-08-04 10:35:53,773 INFO program/MainThread: Running [11] dmsetup info -co subsystem --noheadings rhel_storageqe--62-home ...
2020-08-04 10:35:53,779 INFO program/MainThread: stdout[11]: LVM

2020-08-04 10:35:53,779 INFO program/MainThread: stderr[11]:
2020-08-04 10:35:53,779 INFO program/MainThread: ...done [11] (exit code: 0)
2020-08-04 10:35:53,783 DEBUG blivet/MainThread: DeviceTree.handle_format: name: rhel_storageqe-62-home ;
2020-08-04 10:35:53,783 DEBUG blivet/MainThread: no type or existing type for rhel_storageqe--62-home, bailing
2020-08-04 10:35:53,787 DEBUG blivet/MainThread: DeviceTree.handle_device: name: luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5 ; info: {'DEVLINKS': '/dev/disk/by-id/dm-uuid-CRYPT-LUKS2-d1731709dfb24096a9c06e332d6e95e5-luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5 '
'/dev/disk/by-uuid/f1698797-4201-47e1-ac23-985ab6927e03 '
'/dev/disk/by-id/dm-name-luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5 '
'/dev/mapper/luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5',
'DEVNAME': '/dev/dm-3',
'DEVPATH': '/devices/virtual/block/dm-3',
'DEVTYPE': 'disk',
'DM_NAME': 'luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5',
'DM_SUSPENDED': '0',
'DM_UDEV_DISABLE_LIBRARY_FALLBACK_FLAG': '1',
'DM_UDEV_PRIMARY_SOURCE_FLAG': '1',
'DM_UDEV_RULES_VSN': '2',
'DM_UUID': 'CRYPT-LUKS2-d1731709dfb24096a9c06e332d6e95e5-luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5',
'ID_FS_TYPE': 'xfs',
'ID_FS_USAGE': 'filesystem',
'ID_FS_UUID': 'f1698797-4201-47e1-ac23-985ab6927e03',
'ID_FS_UUID_ENC': 'f1698797-4201-47e1-ac23-985ab6927e03',
'MAJOR': '253',
'MINOR': '3',
'SUBSYSTEM': 'block',
'SYS_NAME': 'dm-3',
'SYS_PATH': '/sys/devices/virtual/block/dm-3',
'TAGS': ':systemd:',
'USEC_INITIALIZED': '375553204'} ;
2020-08-04 10:35:53,787 INFO blivet/MainThread: scanning luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5 (/sys/devices/virtual/block/dm-3)...
2020-08-04 10:35:53,791 DEBUG blivet/MainThread: DeviceTree.get_device_by_name: name: luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5 ; incomplete: False ; hidden: False ;
2020-08-04 10:35:53,794 DEBUG blivet/MainThread: DeviceTree.get_device_by_name returned None
2020-08-04 10:35:53,794 INFO program/MainThread: Running [12] dmsetup info -co subsystem --noheadings luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5 ...
2020-08-04 10:35:53,800 INFO program/MainThread: stdout[12]: CRYPT

2020-08-04 10:35:53,800 INFO program/MainThread: stderr[12]:
2020-08-04 10:35:53,800 INFO program/MainThread: ...done [12] (exit code: 0)
2020-08-04 10:35:53,806 DEBUG blivet/MainThread: DeviceTree.get_device_by_name: name: nvme0n1p1 ; incomplete: False ; hidden: False ;
2020-08-04 10:35:53,810 DEBUG blivet/MainThread: DeviceTree.get_device_by_name returned existing 745.21 GiB partition nvme0n1p1 (213) with existing luks
2020-08-04 10:35:53,811 DEBUG blivet/MainThread: get_format('None') returning DeviceFormat instance with object id 224
2020-08-04 10:35:53,815 DEBUG blivet/MainThread: PartitionDevice.add_child: name: nvme0n1p1 ; child: luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5 ; kids: 0 ;
2020-08-04 10:35:53,815 DEBUG blivet/MainThread: get_format('None') returning DeviceFormat instance with object id 225
2020-08-04 10:35:53,818 DEBUG blivet/MainThread: LUKSDevice._set_format: luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5 ; type: None ; current: None ;
2020-08-04 10:35:53,822 DEBUG blivet/MainThread: LUKSDevice.update_sysfs_path: luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5 ; status: True ;
2020-08-04 10:35:53,822 DEBUG blivet/MainThread: luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5 sysfs_path set to /sys/devices/virtual/block/dm-3
2020-08-04 10:35:53,826 DEBUG blivet/MainThread: LUKSDevice.read_current_size: exists: True ; path: /dev/mapper/luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5 ; sysfs_path: /sys/devices/virtual/block/dm-3 ;
2020-08-04 10:35:53,826 DEBUG blivet/MainThread: updated luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5 size to 745.2 GiB (745.2 GiB)
2020-08-04 10:35:53,827 INFO blivet/MainThread: added luks/dm-crypt luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5 (id 223) to device tree
2020-08-04 10:35:53,827 INFO blivet/MainThread: got device: LUKSDevice instance (0x7f744a236860) --
name = luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5 status = True id = 223
children = []
parents = ['existing 745.21 GiB partition nvme0n1p1 (213) with existing luks']
uuid = None size = 745.2 GiB
format = existing None
major = 0 minor = 0 exists = True protected = False
sysfs path = /sys/devices/virtual/block/dm-3
target size = 0 B path = /dev/mapper/luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5
format args = [] original_format = None target = crypt dm_uuid = None
2020-08-04 10:35:53,828 INFO program/MainThread: Running [13] dmsetup info -co subsystem --noheadings luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5 ...
2020-08-04 10:35:53,833 INFO program/MainThread: stdout[13]: CRYPT

2020-08-04 10:35:53,834 INFO program/MainThread: stderr[13]:
2020-08-04 10:35:53,834 INFO program/MainThread: ...done [13] (exit code: 0)
2020-08-04 10:35:53,834 INFO program/MainThread: Running [14] dmsetup info -co subsystem --noheadings luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5 ...
2020-08-04 10:35:53,840 INFO program/MainThread: stdout[14]: CRYPT

2020-08-04 10:35:53,840 INFO program/MainThread: stderr[14]:
2020-08-04 10:35:53,840 INFO program/MainThread: ...done [14] (exit code: 0)
2020-08-04 10:35:53,844 DEBUG blivet/MainThread: DeviceTree.handle_format: name: luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5 ;
2020-08-04 10:35:53,848 DEBUG blivet/MainThread: AppleBootstrapFS.supported: supported: True ;
2020-08-04 10:35:53,848 DEBUG blivet/MainThread: get_format('appleboot') returning AppleBootstrapFS instance with object id 227
2020-08-04 10:35:53,852 DEBUG blivet/MainThread: EFIFS.supported: supported: True ;
2020-08-04 10:35:53,852 DEBUG blivet/MainThread: get_format('efi') returning EFIFS instance with object id 228
2020-08-04 10:35:53,856 DEBUG blivet/MainThread: MacEFIFS.supported: supported: True ;
2020-08-04 10:35:53,856 DEBUG blivet/MainThread: get_format('macefi') returning MacEFIFS instance with object id 229
2020-08-04 10:35:53,861 DEBUG blivet/MainThread: MacEFIFS.supported: supported: True ;
2020-08-04 10:35:53,861 DEBUG blivet/MainThread: get_format('macefi') returning MacEFIFS instance with object id 230
2020-08-04 10:35:53,861 INFO blivet/MainThread: type detected on 'luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5' is 'xfs'
2020-08-04 10:35:53,864 DEBUG blivet/MainThread: XFS.supported: supported: True ;
2020-08-04 10:35:53,864 DEBUG blivet/MainThread: get_format('xfs') returning XFS instance with object id 231
2020-08-04 10:35:53,867 DEBUG blivet/MainThread: LUKSDevice._set_format: luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5 ; type: xfs ; current: None ;
2020-08-04 10:35:53,868 INFO blivet/MainThread: got format: existing xfs filesystem
2020-08-04 10:35:53,868 INFO program/MainThread: Running... udevadm settle --timeout=300
2020-08-04 10:35:53,888 DEBUG program/MainThread: Return code: 0
2020-08-04 10:35:53,917 INFO blivet/MainThread: edd: MBR signature on nvme0n1 is zero. new disk image?
2020-08-04 10:35:53,917 INFO blivet/MainThread: edd: MBR signature on sda is zero. new disk image?
2020-08-04 10:35:53,917 INFO blivet/MainThread: edd: MBR signature on sdl is zero. new disk image?
2020-08-04 10:35:53,917 INFO blivet/MainThread: edd: collected mbr signatures: {}
2020-08-04 10:35:53,924 DEBUG blivet/MainThread: DeviceTree.get_device_by_path: path: /dev/mapper/rhel_storageqe--62-root ; incomplete: False ; hidden: False ;
2020-08-04 10:35:53,929 DEBUG blivet/MainThread: DeviceTree.get_device_by_path returned existing 70 GiB lvmlv rhel_storageqe-62-root (56) with existing xfs filesystem
2020-08-04 10:35:53,929 DEBUG blivet/MainThread: resolved '/dev/mapper/rhel_storageqe--62-root' to 'rhel_storageqe-62-root' (lvmlv)
2020-08-04 10:35:53,930 DEBUG blivet/MainThread: resolved 'UUID=02369863-9365-4c2c-a2c4-141b221fdf33' to 'sda2' (partition)
2020-08-04 10:35:53,930 DEBUG blivet/MainThread: resolved 'UUID=E3F6-B0B3' to 'sda1' (partition)
2020-08-04 10:35:53,933 DEBUG blivet/MainThread: DeviceTree.get_device_by_path: path: /dev/mapper/rhel_storageqe--62-home ; incomplete: False ; hidden: False ;
2020-08-04 10:35:53,937 DEBUG blivet/MainThread: DeviceTree.get_device_by_path returned existing 199.97 GiB lvmlv rhel_storageqe-62-home (43) with existing xfs filesystem
2020-08-04 10:35:53,938 DEBUG blivet/MainThread: resolved '/dev/mapper/rhel_storageqe--62-home' to 'rhel_storageqe-62-home' (lvmlv)
2020-08-04 10:35:53,941 DEBUG blivet/MainThread: DeviceTree.get_device_by_path: path: /dev/mapper/rhel_storageqe--62-swap ; incomplete: False ; hidden: False ;
2020-08-04 10:35:53,945 DEBUG blivet/MainThread: DeviceTree.get_device_by_path returned existing 7.84 GiB lvmlv rhel_storageqe-62-swap (69) with existing swap
2020-08-04 10:35:53,945 DEBUG blivet/MainThread: resolved '/dev/mapper/rhel_storageqe--62-swap' to 'rhel_storageqe-62-swap' (lvmlv)
2020-08-04 10:35:53,948 DEBUG blivet/MainThread: DeviceTree.get_device_by_path: path: /dev/mapper/luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5 ; incomplete: False ; hidden: False ;
2020-08-04 10:35:53,952 DEBUG blivet/MainThread: DeviceTree.get_device_by_path returned existing 745.2 GiB luks/dm-crypt luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5 (223) with existing xfs filesystem
2020-08-04 10:35:53,952 DEBUG blivet/MainThread: resolved '/dev/mapper/luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5' to 'luks-d1731709-dfb2-4096-a9c0-6e332d6e95e5' (luks/dm-crypt)
2020-08-04 10:35:53,956 DEBUG blivet/MainThread: DeviceTree.get_device_by_name: name: nvme0n1 ; incomplete: False ; hidden: False ;
2020-08-04 10:35:53,960 DEBUG blivet/MainThread: DeviceTree.get_device_by_name returned existing 745.21 GiB disk nvme0n1 (203) with existing gpt disklabel
2020-08-04 10:35:53,960 DEBUG blivet/MainThread: resolved 'nvme0n1' to 'nvme0n1' (disk)
2020-08-04 10:35:53,964 DEBUG blivet/MainThread: DeviceTree.get_device_by_name: name: nvme0n11 ; incomplete: False ; hidden: False ;
2020-08-04 10:35:53,968 DEBUG blivet/MainThread: DeviceTree.get_device_by_name returned None
2020-08-04 10:35:53,971 DEBUG blivet/MainThread: DeviceTree.get_device_by_path: path: /dev/nvme0n11 ; incomplete: False ; hidden: False ;
2020-08-04 10:35:53,975 DEBUG blivet/MainThread: DeviceTree.get_device_by_path returned None
2020-08-04 10:35:53,976 DEBUG blivet/MainThread: failed to resolve '/dev/nvme0n11'
2020-08-04 10:35:53,980 DEBUG blivet/MainThread: XFS.supported: supported: True ;
2020-08-04 10:35:53,980 DEBUG blivet/MainThread: get_format('xfs') returning XFS instance with object id 233
2020-08-04 10:35:53,984 DEBUG blivet/MainThread: XFS.supported: supported: True ;
2020-08-04 10:35:53,984 DEBUG blivet/MainThread: get_format('None') returning DeviceFormat instance with object id 235
2020-08-04 10:35:53,989 DEBUG blivet/MainThread: DiskDevice.add_child: name: nvme0n1 ; child: req0 ; kids: 1 ;
2020-08-04 10:35:53,992 DEBUG blivet/MainThread: PartitionDevice._set_format: req0 ; type: xfs ; current: None ;
2020-08-04 10:35:53,997 DEBUG blivet/MainThread: DiskDevice.remove_child: name: nvme0n1 ; child: req0 ; kids: 2 ;
2020-08-04 10:35:53,998 INFO blivet/MainThread: added partition req0 (id 234) to device tree
2020-08-04 10:35:53,998 INFO blivet/MainThread: registered action: [237] create device partition req0 (id 234)
2020-08-04 10:35:53,999 DEBUG blivet/MainThread: get_format('None') returning DeviceFormat instance with object id 239
2020-08-04 10:35:54,005 DEBUG blivet/MainThread: PartitionDevice._set_format: req0 ; type: xfs ; current: xfs ;
2020-08-04 10:35:54,006 INFO blivet/MainThread: registered action: [238] create format xfs filesystem mounted at /opt/test1 on partition req0 (id 234)
2020-08-04 10:35:54,011 DEBUG blivet/MainThread: DiskDevice.setup: nvme0n1 ; orig: False ; status: True ; controllable: True ;
2020-08-04 10:35:54,015 DEBUG blivet/MainThread: DiskDevice.setup: sda ; orig: False ; status: True ; controllable: True ;
2020-08-04 10:35:54,018 DEBUG blivet/MainThread: DiskDevice.setup: sdl ; orig: False ; status: True ; controllable: True ;
2020-08-04 10:35:54,019 DEBUG blivet/MainThread: removing all non-preexisting partitions ['nvme0n1p1(id 213)', 'req0(id 234)', 'sda1(id 12)', 'sda2(id 20)', 'sda3(id 30)', 'sdl1(id 153)'] from disk(s) ['nvme0n1', 'sda', 'sdl']
2020-08-04 10:35:54,021 DEBUG blivet/MainThread: allocate_partitions: disks=['nvme0n1', 'sda', 'sdl'] ; partitions=['nvme0n1p1(id 213)', 'req0(id 234)', 'sda1(id 12)', 'sda2(id 20)', 'sda3(id 30)', 'sdl1(id 153)']
2020-08-04 10:35:54,022 DEBUG blivet/MainThread: removing all non-preexisting partitions ['req0(id 234)'] from disk(s) ['nvme0n1', 'sda', 'sdl']
2020-08-04 10:35:54,022 DEBUG blivet/MainThread: allocating partition: req0 ; id: 234 ; disks: ['nvme0n1'] ;
boot: False ; primary: False ; size: 256 MiB ; grow: True ; max_size: 0 B ; start: None ; end: None
2020-08-04 10:35:54,023 DEBUG blivet/MainThread: checking freespace on nvme0n1
2020-08-04 10:35:54,024 DEBUG blivet/MainThread: get_best_free_space_region: disk=/dev/nvme0n1 part_type=0 req_size=256 MiB boot=False best=None grow=True start=None
2020-08-04 10:35:54,024 DEBUG blivet/MainThread: checking 34-2047 (1007 KiB)
2020-08-04 10:35:54,024 DEBUG blivet/MainThread: current free range is 34-2047 (1007 KiB)
2020-08-04 10:35:54,025 DEBUG blivet/MainThread: checking 1562822656-1562824334 (839.5 KiB)
2020-08-04 10:35:54,025 DEBUG blivet/MainThread: current free range is 1562822656-1562824334 (839.5 KiB)
2020-08-04 10:35:54,025 DEBUG blivet/MainThread: not enough free space for primary -- trying logical

@yizhanglinux yizhanglinux changed the title storage: tests_luks.yml failed with nvme disk storage: tests_luks.yml partition case failed with nvme disk Aug 4, 2020
dwlehman added a commit to dwlehman/storage that referenced this issue Aug 4, 2020
@yizhanglinux
Copy link
Collaborator Author

So it's not related to LUKS; the playbook below can also reproduce this issue.

---
- hosts: all
  become: true
  vars:
    storage_safe_mode: false
    mount_location: '/opt/test1'
    volume_size: '5g'

  tasks:
    - include_role:
        name: storage

    - include_tasks: get_unused_disk.yml
      vars:
        min_size: "{{ volume_size }}"
        max_return: 1

    ##
    ## Partition
    ##

    - name: Create an encrypted partition volume w/ default fs
      include_role:
        name: storage
      vars:
        storage_pools:
          - name: foo
            type: partition
            disks: "{{ unused_disks }}"
            volumes:
              - name: test1
                type: partition
                mount_point: "{{ mount_location }}"
                #                size: 4g

    - include_tasks: verify-role-results.yml

    - name: Remove the encryption layer
      include_role:
        name: storage
      vars:
        storage_pools:
          - name: foo
            type: partition
            disks: "{{ unused_disks }}"
            volumes:
              - name: test1
                type: partition
                mount_point: "{{ mount_location }}"
                #                size: 4g

    - include_tasks: verify-role-results.yml

    - name: Clean up
      include_role:
        name: storage
      vars:
        storage_pools:
          - name: foo
            type: partition
            disks: "{{ unused_disks }}"

    - include_tasks: verify-role-results.yml

@dwlehman dwlehman self-assigned this Aug 19, 2020
dwlehman added a commit to dwlehman/storage that referenced this issue Aug 20, 2020
BlivetVolume._get_device_id is only used to look up pre-existing
volumes, so we don't have to try too hard to guess it by name.
We can just see if the disk has a single partition and, if so,
return the name of that partition.

Fixes: linux-system-roles#141
dwlehman added a commit to dwlehman/storage that referenced this issue Aug 20, 2020
BlivetVolume._get_device_id is only used to look up pre-existing
volumes, so we don't have to try too hard to guess it by name.
We can just see if the disk has a single partition and, if so,
return the name of that partition.

Fixes: linux-system-roles#141
dwlehman added a commit to dwlehman/storage that referenced this issue Mar 12, 2021
BlivetVolume._get_device_id is only used to look up pre-existing
volumes, so we don't have to try too hard to guess it by name.
We can just see if the disk has a single partition and, if so,
return the name of that partition.

Fixes: linux-system-roles#141
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment
Labels
None yet
Projects
None yet
Development

Successfully merging a pull request may close this issue.

2 participants