ansible-playbook [core 2.17.14]
  config file = None
  configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /usr/local/lib/python3.12/site-packages/ansible
  ansible collection location = /tmp/collections-aXM
  executable location = /usr/local/bin/ansible-playbook
  python version = 3.12.12 (main, Mar 9 2026, 00:00:00) [GCC 14.3.1 20251022 (Red Hat 14.3.1-4)] (/usr/bin/python3.12)
  jinja version = 3.1.6
  libyaml = True
No config file found; using defaults
running playbook inside collection fedora.linux_system_roles
Skipping callback 'debug', as we already have a stdout callback.
Skipping callback 'json', as we already have a stdout callback.
Skipping callback 'jsonl', as we already have a stdout callback.
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.

PLAYBOOK: tests_stratis.yml ****************************************************
1 plays in /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml

PLAY [Test stratis pool management] ********************************************

TASK [Gathering Facts] *********************************************************
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:2
Saturday 28 March 2026 19:22:25 -0400 (0:00:00.589) 0:00:00.589 ********
[WARNING]: Platform linux on host managed-node12 is using the discovered Python
interpreter at /usr/bin/python3.12, but future installation of another Python
interpreter could change the meaning of that path. See
https://docs.ansible.com/ansible-core/2.17/reference_appendices/interpreter_discovery.html
for more information.
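For context on what this run exercises: the storage role is driven by the storage_pools
variable, and a Stratis test like this one passes a pool of type "stratis" built on unused
disks. The playbook below is only an illustrative sketch of such an invocation, assuming the
collection is installed; the disk name "sdb", pool name "foo", volume name "test1", and the
size/mount_point values are placeholders, not values taken from this log.

    # Illustrative sketch, not the contents of tests_stratis.yml
    - name: Create a Stratis pool with one filesystem (sketch)
      hosts: all
      vars:
        storage_pools:
          - name: foo            # placeholder pool name
            type: stratis
            disks:
              - sdb              # assumed unused disk on the managed node
            volumes:
              - name: test1      # placeholder filesystem name
                size: 4g         # assumed size; see the role docs for supported fields
                mount_point: /opt/test1
      roles:
        - fedora.linux_system_roles.storage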
ok: [managed-node12]

TASK [Run the role] ************************************************************
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:14
Saturday 28 March 2026 19:22:30 -0400 (0:00:04.574) 0:00:05.164 ********
included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml for managed-node12

TASK [Clear facts] *************************************************************
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml:10
Saturday 28 March 2026 19:22:30 -0400 (0:00:00.193) 0:00:05.358 ********
META: facts cleared

TASK [Run the role] ************************************************************
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml:24
Saturday 28 March 2026 19:22:30 -0400 (0:00:00.015) 0:00:05.373 ********
skipping: [managed-node12] => {
    "changed": false,
    "false_condition": "__sr_failed_when is defined",
    "skip_reason": "Conditional result was False"
}

TASK [Run the role normally] ***************************************************
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml:34
Saturday 28 March 2026 19:22:30 -0400 (0:00:00.175) 0:00:05.548 ********
included: fedora.linux_system_roles.storage for managed-node12

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2
Saturday 28 March 2026 19:22:31 -0400 (0:00:00.696) 0:00:06.246 ********
included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node12

TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] ***
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2
Saturday 28 March 2026 19:22:31 -0400 (0:00:00.135) 0:00:06.381 ********
ok: [managed-node12]

TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] ***
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7
Saturday 28 March 2026 19:22:33 -0400 (0:00:01.872) 0:00:08.254 ********
skipping: [managed-node12] => (item=RedHat.yml) => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "RedHat.yml",
    "skip_reason": "Conditional result was False"
}
skipping: [managed-node12] => (item=CentOS.yml) => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node12] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "blivet_package_list": [
            "python3-blivet",
            "libblockdev-crypto",
            "libblockdev-dm",
            "libblockdev-fs",
            "libblockdev-lvm",
            "libblockdev-mdraid",
            "libblockdev-swap",
            "xfsprogs",
            "stratisd",
            "stratis-cli",
            "{{ 'libblockdev-s390' if ansible_facts['architecture'] == 's390x' else 'libblockdev' }}",
            "vdo"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "CentOS_10.yml"
}
ok: [managed-node12] => (item=CentOS_10.yml) => {
    "ansible_facts": {
"blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-fs", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "xfsprogs", "stratisd", "stratis-cli", "{{ 'libblockdev-s390' if ansible_facts['architecture'] == 's390x' else 'libblockdev' }}", "vdo" ] }, "ansible_included_var_files": [ "/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Saturday 28 March 2026 19:22:33 -0400 (0:00:00.519) 0:00:08.773 ******** ok: [managed-node12] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Saturday 28 March 2026 19:22:35 -0400 (0:00:02.247) 0:00:11.021 ******** ok: [managed-node12] => { "ansible_facts": { "__storage_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Saturday 28 March 2026 19:22:36 -0400 (0:00:00.420) 0:00:11.441 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Saturday 28 March 2026 19:22:36 -0400 (0:00:00.175) 0:00:11.616 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Saturday 28 March 2026 19:22:36 -0400 (0:00:00.230) 0:00:11.847 ******** redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node12 TASK [fedora.linux_system_roles.storage : Add repo key] ************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Saturday 28 March 2026 19:22:37 -0400 (0:00:00.521) 0:00:12.368 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_blivet_custom_repo.key is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Add blivet repo] ********************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:15 Saturday 28 March 2026 19:22:37 -0400 (0:00:00.228) 0:00:12.597 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_blivet_custom_repo.key is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:20 Saturday 28 March 2026 19:22:37 -0400 (0:00:00.277) 0:00:12.874 ******** ok: [managed-node12] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: libblockdev libblockdev-crypto libblockdev-dm libblockdev-fs libblockdev-lvm libblockdev-mdraid libblockdev-swap python3-blivet stratis-cli stratisd vdo xfsprogs TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:27 Saturday 28 March 2026 19:22:41 -0400 (0:00:03.401) 0:00:16.276 ******** ok: [managed-node12] => { "storage_pools | d([])": [] } TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:32 Saturday 28 March 2026 19:22:41 -0400 (0:00:00.226) 0:00:16.502 ******** ok: [managed-node12] => { "storage_volumes | d([])": [] } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37 Saturday 28 March 2026 19:22:41 -0400 (0:00:00.229) 0:00:16.731 ******** ok: [managed-node12] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:50 Saturday 28 March 2026 19:22:45 -0400 (0:00:03.413) 0:00:20.145 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml for managed-node12 TASK [fedora.linux_system_roles.storage : Check if the COPR support packages should be installed] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:2 Saturday 28 March 2026 19:22:45 -0400 (0:00:00.405) 0:00:20.550 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Make sure COPR support packages are present] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:13 Saturday 28 March 2026 19:22:45 -0400 (0:00:00.140) 0:00:20.691 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "install_copr | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable COPRs] ************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:19 Saturday 28 March 2026 19:22:45 -0400 (0:00:00.126) 0:00:20.818 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:56 Saturday 28 March 2026 19:22:45 -0400 (0:00:00.152) 0:00:20.971 ******** ok: [managed-node12] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: kpartx TASK [fedora.linux_system_roles.storage : Get service facts] 
******************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:76 Saturday 28 March 2026 19:22:47 -0400 (0:00:01.672) 0:00:22.644 ******** ok: [managed-node12] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "apt-daily.service": { "name": "apt-daily.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, 
"dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fcoe.service": { "name": "fcoe.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": 
"enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "iscsi-shutdown.service": { "name": "iscsi-shutdown.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsi.service": { "name": "iscsi.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsid.service": { "name": "iscsid.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-activation-early.service": { "name": "lvm2-activation-early.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": 
"inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_multipath.service": { "name": "modprobe@dm_multipath.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": 
"inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "raid-check.service": { "name": "raid-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "rbdmap.service": { "name": "rbdmap.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-unix-local@.service": { "name": "sshd-unix-local@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd-vsock@.service": { "name": "sshd-vsock@.service", 
"source": "systemd", "state": "unknown", "status": "alias" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "stratis-fstab-setup-with-network@.service": { "name": "stratis-fstab-setup-with-network@.service", "source": "systemd", "state": "unknown", "status": "static" }, "stratis-fstab-setup@.service": { "name": "stratis-fstab-setup@.service", "source": "systemd", "state": "unknown", "status": "static" }, "stratisd-min-postinitrd.service": { "name": "stratisd-min-postinitrd.service", "source": "systemd", "state": "inactive", "status": "static" }, "stratisd.service": { "name": "stratisd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, 
"systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": 
"systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": 
"stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", 
"source": "systemd", "state": "stopped", "status": "static" }, "systemd-userdbd.service": { "name": "systemd-userdbd.service", "source": "systemd", "state": "running", "status": "indirect" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:82 Saturday 28 March 2026 19:22:51 -0400 (0:00:04.403) 0:00:27.047 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:88 Saturday 28 March 2026 19:22:52 -0400 (0:00:00.431) 0:00:27.479 ******** ok: [managed-node12] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:103 Saturday 28 March 2026 19:22:53 -0400 (0:00:01.446) 0:00:28.925 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_udevadm_trigger | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ****** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:110 Saturday 28 March 2026 19:22:53 -0400 (0:00:00.157) 0:00:29.083 ******** ok: [managed-node12] => { "changed": false, "stat": { "atime": 1774740053.2468836, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "6aeccfbb3223a634b983c3c21792c1ba90809bb8", "ctime": 1774740128.421465, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 230686985, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1774740128.421465, "nlink": 1, "path": "/etc/fstab", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 
1408, "uid": 0, "version": "1679064280", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:115 Saturday 28 March 2026 19:22:54 -0400 (0:00:01.006) 0:00:30.089 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "blivet_output is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:133 Saturday 28 March 2026 19:22:55 -0400 (0:00:00.216) 0:00:30.306 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:139 Saturday 28 March 2026 19:22:55 -0400 (0:00:00.342) 0:00:30.648 ******** ok: [managed-node12] => { "blivet_output": { "actions": [], "changed": false, "crypts": [], "failed": false, "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } } TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148 Saturday 28 March 2026 19:22:55 -0400 (0:00:00.254) 0:00:30.903 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:152 Saturday 28 March 2026 19:22:55 -0400 (0:00:00.116) 0:00:31.019 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] ************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:168 Saturday 28 March 2026 19:22:56 -0400 (0:00:00.247) 0:00:31.266 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:179 Saturday 28 March 2026 19:22:56 -0400 (0:00:00.243) 0:00:31.510 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "blivet_output['mounts'] | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set up new/current mounts] *********** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:184 Saturday 28 March 2026 19:22:56 -0400 (0:00:00.157) 0:00:31.668 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195 Saturday 28 March 2026 
19:22:56 -0400 (0:00:00.226) 0:00:31.895 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:207 Saturday 28 March 2026 19:22:57 -0400 (0:00:00.235) 0:00:32.131 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "blivet_output['mounts'] | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:215 Saturday 28 March 2026 19:22:57 -0400 (0:00:00.231) 0:00:32.362 ******** ok: [managed-node12] => { "changed": false, "stat": { "atime": 1774737762.6954868, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1774342704.475, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 4194435, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1774342323.629, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "1224473831", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:220 Saturday 28 March 2026 19:22:58 -0400 (0:00:01.097) 0:00:33.459 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:242 Saturday 28 March 2026 19:22:58 -0400 (0:00:00.169) 0:00:33.629 ******** ok: [managed-node12] TASK [Mark tasks to be skipped] ************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:17 Saturday 28 March 2026 19:23:00 -0400 (0:00:01.858) 0:00:35.488 ******** ok: [managed-node12] => { "ansible_facts": { "storage_skip_checks": [ "blivet_available", "packages_installed" ] }, "changed": false } TASK [Gather package facts] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:23 Saturday 28 March 2026 19:23:00 -0400 (0:00:00.281) 0:00:35.770 ******** ok: [managed-node12] => { "ansible_facts": { "packages": { "NetworkManager": [ { "arch": "x86_64", "epoch": 1, "name": "NetworkManager", "release": "1.el10", "source": "rpm", "version": "1.56.0" } ], "NetworkManager-libnm": [ { "arch": "x86_64", "epoch": 1, "name": "NetworkManager-libnm", "release": "1.el10", "source": "rpm", "version": "1.56.0" } ], "NetworkManager-tui": [ { "arch": "x86_64", "epoch": 1, "name": "NetworkManager-tui", "release": "1.el10", "source": "rpm", 
"version": "1.56.0" } ], "alternatives": [ { "arch": "x86_64", "epoch": null, "name": "alternatives", "release": "2.el10", "source": "rpm", "version": "1.30" } ], "amd-gpu-firmware": [ { "arch": "noarch", "epoch": null, "name": "amd-gpu-firmware", "release": "22.el10", "source": "rpm", "version": "20260130" } ], "amd-ucode-firmware": [ { "arch": "noarch", "epoch": null, "name": "amd-ucode-firmware", "release": "22.el10", "source": "rpm", "version": "20260130" } ], "aspell": [ { "arch": "x86_64", "epoch": 12, "name": "aspell", "release": "3.el10", "source": "rpm", "version": "0.60.8.1" } ], "atheros-firmware": [ { "arch": "noarch", "epoch": null, "name": "atheros-firmware", "release": "22.el10", "source": "rpm", "version": "20260130" } ], "attr": [ { "arch": "x86_64", "epoch": null, "name": "attr", "release": "5.el10", "source": "rpm", "version": "2.5.2" } ], "audit": [ { "arch": "x86_64", "epoch": null, "name": "audit", "release": "5.el10", "source": "rpm", "version": "4.0.3" } ], "audit-libs": [ { "arch": "x86_64", "epoch": null, "name": "audit-libs", "release": "5.el10", "source": "rpm", "version": "4.0.3" } ], "audit-rules": [ { "arch": "x86_64", "epoch": null, "name": "audit-rules", "release": "5.el10", "source": "rpm", "version": "4.0.3" } ], "authselect": [ { "arch": "x86_64", "epoch": null, "name": "authselect", "release": "1.el10", "source": "rpm", "version": "1.5.2" } ], "authselect-libs": [ { "arch": "x86_64", "epoch": null, "name": "authselect-libs", "release": "1.el10", "source": "rpm", "version": "1.5.2" } ], "avahi-libs": [ { "arch": "x86_64", "epoch": null, "name": "avahi-libs", "release": "2.el10", "source": "rpm", "version": "0.9~rc2" } ], "basesystem": [ { "arch": "noarch", "epoch": null, "name": "basesystem", "release": "22.el10", "source": "rpm", "version": "11" } ], "bash": [ { "arch": "x86_64", "epoch": null, "name": "bash", "release": "6.el10", "source": "rpm", "version": "5.2.26" } ], "bc": [ { "arch": "x86_64", "epoch": null, "name": "bc", "release": "23.el10", "source": "rpm", "version": "1.07.1" } ], "beakerlib": [ { "arch": "noarch", "epoch": null, "name": "beakerlib", "release": "1.el10", "source": "rpm", "version": "1.31.4" } ], "beakerlib-redhat": [ { "arch": "noarch", "epoch": null, "name": "beakerlib-redhat", "release": "35.el9", "source": "rpm", "version": "1" } ], "binutils": [ { "arch": "x86_64", "epoch": null, "name": "binutils", "release": "64.el10", "source": "rpm", "version": "2.41" } ], "binutils-gold": [ { "arch": "x86_64", "epoch": null, "name": "binutils-gold", "release": "64.el10", "source": "rpm", "version": "2.41" } ], "bison": [ { "arch": "x86_64", "epoch": null, "name": "bison", "release": "9.el10", "source": "rpm", "version": "3.8.2" } ], "blivet-data": [ { "arch": "noarch", "epoch": 1, "name": "blivet-data", "release": "5.el10", "source": "rpm", "version": "3.13.0" } ], "boost-atomic": [ { "arch": "x86_64", "epoch": null, "name": "boost-atomic", "release": "7.el10", "source": "rpm", "version": "1.83.0" } ], "boost-filesystem": [ { "arch": "x86_64", "epoch": null, "name": "boost-filesystem", "release": "7.el10", "source": "rpm", "version": "1.83.0" } ], "boost-system": [ { "arch": "x86_64", "epoch": null, "name": "boost-system", "release": "7.el10", "source": "rpm", "version": "1.83.0" } ], "boost-thread": [ { "arch": "x86_64", "epoch": null, "name": "boost-thread", "release": "7.el10", "source": "rpm", "version": "1.83.0" } ], "brcmfmac-firmware": [ { "arch": "noarch", "epoch": null, "name": "brcmfmac-firmware", "release": "22.el10", 
"source": "rpm", "version": "20260130" } ], "bzip2-libs": [ { "arch": "x86_64", "epoch": null, "name": "bzip2-libs", "release": "25.el10", "source": "rpm", "version": "1.0.8" } ], "c-ares": [ { "arch": "x86_64", "epoch": null, "name": "c-ares", "release": "1.el10", "source": "rpm", "version": "1.34.6" } ], "ca-certificates": [ { "arch": "noarch", "epoch": null, "name": "ca-certificates", "release": "102.el10", "source": "rpm", "version": "2025.2.80_v9.0.305" } ], "centos-gpg-keys": [ { "arch": "noarch", "epoch": null, "name": "centos-gpg-keys", "release": "20.el10", "source": "rpm", "version": "10.0" } ], "centos-stream-release": [ { "arch": "noarch", "epoch": null, "name": "centos-stream-release", "release": "20.el10", "source": "rpm", "version": "10.0" } ], "centos-stream-repos": [ { "arch": "noarch", "epoch": null, "name": "centos-stream-repos", "release": "20.el10", "source": "rpm", "version": "10.0" } ], "checkpolicy": [ { "arch": "x86_64", "epoch": null, "name": "checkpolicy", "release": "1.el10", "source": "rpm", "version": "3.10" } ], "chrony": [ { "arch": "x86_64", "epoch": null, "name": "chrony", "release": "2.el10", "source": "rpm", "version": "4.8" } ], "cirrus-audio-firmware": [ { "arch": "noarch", "epoch": null, "name": "cirrus-audio-firmware", "release": "22.el10", "source": "rpm", "version": "20260130" } ], "clevis": [ { "arch": "x86_64", "epoch": null, "name": "clevis", "release": "9.el10", "source": "rpm", "version": "21" } ], "clevis-luks": [ { "arch": "x86_64", "epoch": null, "name": "clevis-luks", "release": "9.el10", "source": "rpm", "version": "21" } ], "clevis-pin-tpm2": [ { "arch": "x86_64", "epoch": null, "name": "clevis-pin-tpm2", "release": "9.el10", "source": "rpm", "version": "0.5.3" } ], "cloud-init": [ { "arch": "noarch", "epoch": null, "name": "cloud-init", "release": "7.el10", "source": "rpm", "version": "24.4" } ], "cloud-utils-growpart": [ { "arch": "noarch", "epoch": null, "name": "cloud-utils-growpart", "release": "11.el10", "source": "rpm", "version": "0.33" } ], "cmake-filesystem": [ { "arch": "x86_64", "epoch": null, "name": "cmake-filesystem", "release": "1.el10", "source": "rpm", "version": "3.31.8" } ], "coreutils": [ { "arch": "x86_64", "epoch": null, "name": "coreutils", "release": "7.el10", "source": "rpm", "version": "9.5" } ], "coreutils-common": [ { "arch": "x86_64", "epoch": null, "name": "coreutils-common", "release": "7.el10", "source": "rpm", "version": "9.5" } ], "cpio": [ { "arch": "x86_64", "epoch": null, "name": "cpio", "release": "3.el10", "source": "rpm", "version": "2.15" } ], "cpp": [ { "arch": "x86_64", "epoch": null, "name": "cpp", "release": "4.4.el10", "source": "rpm", "version": "14.3.1" } ], "cracklib": [ { "arch": "x86_64", "epoch": null, "name": "cracklib", "release": "8.el10", "source": "rpm", "version": "2.9.11" } ], "cracklib-dicts": [ { "arch": "x86_64", "epoch": null, "name": "cracklib-dicts", "release": "8.el10", "source": "rpm", "version": "2.9.11" } ], "cronie": [ { "arch": "x86_64", "epoch": null, "name": "cronie", "release": "14.el10", "source": "rpm", "version": "1.7.0" } ], "cronie-anacron": [ { "arch": "x86_64", "epoch": null, "name": "cronie-anacron", "release": "14.el10", "source": "rpm", "version": "1.7.0" } ], "crontabs": [ { "arch": "noarch", "epoch": null, "name": "crontabs", "release": "6.el10", "source": "rpm", "version": "1.11^20190603git9e74f2d" } ], "crypto-policies": [ { "arch": "noarch", "epoch": null, "name": "crypto-policies", "release": "1.git0e54016.el10", "source": "rpm", "version": 
"20260216" } ], "crypto-policies-scripts": [ { "arch": "noarch", "epoch": null, "name": "crypto-policies-scripts", "release": "1.git0e54016.el10", "source": "rpm", "version": "20260216" } ], "cryptsetup": [ { "arch": "x86_64", "epoch": null, "name": "cryptsetup", "release": "2.el10", "source": "rpm", "version": "2.8.1" } ], "cryptsetup-libs": [ { "arch": "x86_64", "epoch": null, "name": "cryptsetup-libs", "release": "2.el10", "source": "rpm", "version": "2.8.1" } ], "curl": [ { "arch": "x86_64", "epoch": null, "name": "curl", "release": "4.el10", "source": "rpm", "version": "8.12.1" } ], "cyrus-sasl-gssapi": [ { "arch": "x86_64", "epoch": null, "name": "cyrus-sasl-gssapi", "release": "27.el10", "source": "rpm", "version": "2.1.28" } ], "cyrus-sasl-lib": [ { "arch": "x86_64", "epoch": null, "name": "cyrus-sasl-lib", "release": "27.el10", "source": "rpm", "version": "2.1.28" } ], "dbus": [ { "arch": "x86_64", "epoch": 1, "name": "dbus", "release": "5.el10", "source": "rpm", "version": "1.14.10" } ], "dbus-broker": [ { "arch": "x86_64", "epoch": null, "name": "dbus-broker", "release": "4.el10", "source": "rpm", "version": "36" } ], "dbus-common": [ { "arch": "noarch", "epoch": 1, "name": "dbus-common", "release": "5.el10", "source": "rpm", "version": "1.14.10" } ], "dbus-libs": [ { "arch": "x86_64", "epoch": 1, "name": "dbus-libs", "release": "5.el10", "source": "rpm", "version": "1.14.10" } ], "default-fonts-core-sans": [ { "arch": "noarch", "epoch": null, "name": "default-fonts-core-sans", "release": "3.el10", "source": "rpm", "version": "4.1" } ], "device-mapper": [ { "arch": "x86_64", "epoch": 10, "name": "device-mapper", "release": "2.el10", "source": "rpm", "version": "1.02.210" } ], "device-mapper-event": [ { "arch": "x86_64", "epoch": 10, "name": "device-mapper-event", "release": "2.el10", "source": "rpm", "version": "1.02.210" } ], "device-mapper-event-libs": [ { "arch": "x86_64", "epoch": 10, "name": "device-mapper-event-libs", "release": "2.el10", "source": "rpm", "version": "1.02.210" } ], "device-mapper-libs": [ { "arch": "x86_64", "epoch": 10, "name": "device-mapper-libs", "release": "2.el10", "source": "rpm", "version": "1.02.210" } ], "device-mapper-multipath": [ { "arch": "x86_64", "epoch": null, "name": "device-mapper-multipath", "release": "18.el10", "source": "rpm", "version": "0.9.9" } ], "device-mapper-multipath-libs": [ { "arch": "x86_64", "epoch": null, "name": "device-mapper-multipath-libs", "release": "18.el10", "source": "rpm", "version": "0.9.9" } ], "device-mapper-persistent-data": [ { "arch": "x86_64", "epoch": null, "name": "device-mapper-persistent-data", "release": "1.el10", "source": "rpm", "version": "1.3.0" } ], "dhcpcd": [ { "arch": "x86_64", "epoch": null, "name": "dhcpcd", "release": "10.el10", "source": "rpm", "version": "10.0.6" } ], "diffutils": [ { "arch": "x86_64", "epoch": null, "name": "diffutils", "release": "8.el10", "source": "rpm", "version": "3.10" } ], "dnf": [ { "arch": "noarch", "epoch": null, "name": "dnf", "release": "21.el10", "source": "rpm", "version": "4.20.0" } ], "dnf-data": [ { "arch": "noarch", "epoch": null, "name": "dnf-data", "release": "21.el10", "source": "rpm", "version": "4.20.0" } ], "dnf-plugins-core": [ { "arch": "noarch", "epoch": null, "name": "dnf-plugins-core", "release": "10.el10", "source": "rpm", "version": "4.7.0" } ], "dracut": [ { "arch": "x86_64", "epoch": null, "name": "dracut", "release": "4.el10", "source": "rpm", "version": "107" } ], "dracut-config-rescue": [ { "arch": "x86_64", "epoch": null, "name": 
"dracut-config-rescue", "release": "4.el10", "source": "rpm", "version": "107" } ], "dracut-network": [ { "arch": "x86_64", "epoch": null, "name": "dracut-network", "release": "4.el10", "source": "rpm", "version": "107" } ], "dracut-squash": [ { "arch": "x86_64", "epoch": null, "name": "dracut-squash", "release": "4.el10", "source": "rpm", "version": "107" } ], "dyninst": [ { "arch": "x86_64", "epoch": null, "name": "dyninst", "release": "1.el10", "source": "rpm", "version": "13.0.0" } ], "e2fsprogs": [ { "arch": "x86_64", "epoch": null, "name": "e2fsprogs", "release": "5.el10", "source": "rpm", "version": "1.47.1" } ], "e2fsprogs-libs": [ { "arch": "x86_64", "epoch": null, "name": "e2fsprogs-libs", "release": "5.el10", "source": "rpm", "version": "1.47.1" } ], "efivar-libs": [ { "arch": "x86_64", "epoch": null, "name": "efivar-libs", "release": "3.el10", "source": "rpm", "version": "39" } ], "elfutils-debuginfod-client": [ { "arch": "x86_64", "epoch": null, "name": "elfutils-debuginfod-client", "release": "2.el10", "source": "rpm", "version": "0.194" } ], "elfutils-debuginfod-client-devel": [ { "arch": "x86_64", "epoch": null, "name": "elfutils-debuginfod-client-devel", "release": "2.el10", "source": "rpm", "version": "0.194" } ], "elfutils-default-yama-scope": [ { "arch": "noarch", "epoch": null, "name": "elfutils-default-yama-scope", "release": "2.el10", "source": "rpm", "version": "0.194" } ], "elfutils-devel": [ { "arch": "x86_64", "epoch": null, "name": "elfutils-devel", "release": "2.el10", "source": "rpm", "version": "0.194" } ], "elfutils-libelf": [ { "arch": "x86_64", "epoch": null, "name": "elfutils-libelf", "release": "2.el10", "source": "rpm", "version": "0.194" } ], "elfutils-libelf-devel": [ { "arch": "x86_64", "epoch": null, "name": "elfutils-libelf-devel", "release": "2.el10", "source": "rpm", "version": "0.194" } ], "elfutils-libs": [ { "arch": "x86_64", "epoch": null, "name": "elfutils-libs", "release": "2.el10", "source": "rpm", "version": "0.194" } ], "epel-release": [ { "arch": "noarch", "epoch": null, "name": "epel-release", "release": "5.el10s", "source": "rpm", "version": "10" } ], "erofs-utils": [ { "arch": "x86_64", "epoch": null, "name": "erofs-utils", "release": "1.el10", "source": "rpm", "version": "1.8.10" } ], "ethtool": [ { "arch": "x86_64", "epoch": 2, "name": "ethtool", "release": "2.el10", "source": "rpm", "version": "6.15" } ], "expat": [ { "arch": "x86_64", "epoch": null, "name": "expat", "release": "1.el10", "source": "rpm", "version": "2.7.3" } ], "file": [ { "arch": "x86_64", "epoch": null, "name": "file", "release": "9.el10", "source": "rpm", "version": "5.45" } ], "file-libs": [ { "arch": "x86_64", "epoch": null, "name": "file-libs", "release": "9.el10", "source": "rpm", "version": "5.45" } ], "filesystem": [ { "arch": "x86_64", "epoch": null, "name": "filesystem", "release": "17.el10", "source": "rpm", "version": "3.18" } ], "findutils": [ { "arch": "x86_64", "epoch": 1, "name": "findutils", "release": "5.el10", "source": "rpm", "version": "4.10.0" } ], "firewalld": [ { "arch": "noarch", "epoch": null, "name": "firewalld", "release": "1.el10", "source": "rpm", "version": "2.4.0" } ], "firewalld-filesystem": [ { "arch": "noarch", "epoch": null, "name": "firewalld-filesystem", "release": "1.el10", "source": "rpm", "version": "2.4.0" } ], "flex": [ { "arch": "x86_64", "epoch": null, "name": "flex", "release": "19.el10", "source": "rpm", "version": "2.6.4" } ], "fonts-filesystem": [ { "arch": "noarch", "epoch": 1, "name": "fonts-filesystem", 
"release": "18.el10", "source": "rpm", "version": "2.0.5" } ], "fuse-libs": [ { "arch": "x86_64", "epoch": null, "name": "fuse-libs", "release": "25.el10", "source": "rpm", "version": "2.9.9" } ], "fuse3-libs": [ { "arch": "x86_64", "epoch": null, "name": "fuse3-libs", "release": "5.el10", "source": "rpm", "version": "3.16.2" } ], "gawk": [ { "arch": "x86_64", "epoch": null, "name": "gawk", "release": "6.el10", "source": "rpm", "version": "5.3.0" } ], "gawk-all-langpacks": [ { "arch": "x86_64", "epoch": null, "name": "gawk-all-langpacks", "release": "6.el10", "source": "rpm", "version": "5.3.0" } ], "gcc": [ { "arch": "x86_64", "epoch": null, "name": "gcc", "release": "4.4.el10", "source": "rpm", "version": "14.3.1" } ], "gdbm": [ { "arch": "x86_64", "epoch": 1, "name": "gdbm", "release": "14.el10", "source": "rpm", "version": "1.23" } ], "gdbm-libs": [ { "arch": "x86_64", "epoch": 1, "name": "gdbm-libs", "release": "14.el10", "source": "rpm", "version": "1.23" } ], "gettext-envsubst": [ { "arch": "x86_64", "epoch": null, "name": "gettext-envsubst", "release": "6.el10", "source": "rpm", "version": "0.22.5" } ], "gettext-libs": [ { "arch": "x86_64", "epoch": null, "name": "gettext-libs", "release": "6.el10", "source": "rpm", "version": "0.22.5" } ], "gettext-runtime": [ { "arch": "x86_64", "epoch": null, "name": "gettext-runtime", "release": "6.el10", "source": "rpm", "version": "0.22.5" } ], "git": [ { "arch": "x86_64", "epoch": null, "name": "git", "release": "1.el10", "source": "rpm", "version": "2.52.0" } ], "git-core": [ { "arch": "x86_64", "epoch": null, "name": "git-core", "release": "1.el10", "source": "rpm", "version": "2.52.0" } ], "git-core-doc": [ { "arch": "noarch", "epoch": null, "name": "git-core-doc", "release": "1.el10", "source": "rpm", "version": "2.52.0" } ], "glib2": [ { "arch": "x86_64", "epoch": null, "name": "glib2", "release": "12.el10", "source": "rpm", "version": "2.80.4" } ], "glibc": [ { "arch": "x86_64", "epoch": null, "name": "glibc", "release": "117.el10", "source": "rpm", "version": "2.39" } ], "glibc-common": [ { "arch": "x86_64", "epoch": null, "name": "glibc-common", "release": "117.el10", "source": "rpm", "version": "2.39" } ], "glibc-devel": [ { "arch": "x86_64", "epoch": null, "name": "glibc-devel", "release": "117.el10", "source": "rpm", "version": "2.39" } ], "glibc-gconv-extra": [ { "arch": "x86_64", "epoch": null, "name": "glibc-gconv-extra", "release": "117.el10", "source": "rpm", "version": "2.39" } ], "glibc-langpack-en": [ { "arch": "x86_64", "epoch": null, "name": "glibc-langpack-en", "release": "117.el10", "source": "rpm", "version": "2.39" } ], "gmp": [ { "arch": "x86_64", "epoch": 1, "name": "gmp", "release": "12.el10", "source": "rpm", "version": "6.2.1" } ], "gnutls": [ { "arch": "x86_64", "epoch": null, "name": "gnutls", "release": "3.el10", "source": "rpm", "version": "3.8.10" } ], "gobject-introspection": [ { "arch": "x86_64", "epoch": null, "name": "gobject-introspection", "release": "6.el10", "source": "rpm", "version": "1.79.1" } ], "google-noto-fonts-common": [ { "arch": "noarch", "epoch": null, "name": "google-noto-fonts-common", "release": "5.el10", "source": "rpm", "version": "20240401" } ], "google-noto-sans-mono-vf-fonts": [ { "arch": "noarch", "epoch": null, "name": "google-noto-sans-mono-vf-fonts", "release": "5.el10", "source": "rpm", "version": "20240401" } ], "google-noto-sans-vf-fonts": [ { "arch": "noarch", "epoch": null, "name": "google-noto-sans-vf-fonts", "release": "5.el10", "source": "rpm", "version": "20240401" } 
], "google-noto-serif-vf-fonts": [ { "arch": "noarch", "epoch": null, "name": "google-noto-serif-vf-fonts", "release": "5.el10", "source": "rpm", "version": "20240401" } ], "gpg-pubkey": [ { "arch": null, "epoch": null, "name": "gpg-pubkey", "release": "5ccc5b19", "source": "rpm", "version": "8483c65d" } ], "grep": [ { "arch": "x86_64", "epoch": null, "name": "grep", "release": "10.el10", "source": "rpm", "version": "3.11" } ], "groff-base": [ { "arch": "x86_64", "epoch": null, "name": "groff-base", "release": "10.el10", "source": "rpm", "version": "1.23.0" } ], "grub2-common": [ { "arch": "noarch", "epoch": 1, "name": "grub2-common", "release": "45.el10", "source": "rpm", "version": "2.12" } ], "grub2-pc": [ { "arch": "x86_64", "epoch": 1, "name": "grub2-pc", "release": "45.el10", "source": "rpm", "version": "2.12" } ], "grub2-pc-modules": [ { "arch": "noarch", "epoch": 1, "name": "grub2-pc-modules", "release": "45.el10", "source": "rpm", "version": "2.12" } ], "grub2-tools": [ { "arch": "x86_64", "epoch": 1, "name": "grub2-tools", "release": "45.el10", "source": "rpm", "version": "2.12" } ], "grub2-tools-minimal": [ { "arch": "x86_64", "epoch": 1, "name": "grub2-tools-minimal", "release": "45.el10", "source": "rpm", "version": "2.12" } ], "grubby": [ { "arch": "x86_64", "epoch": null, "name": "grubby", "release": "83.el10", "source": "rpm", "version": "8.40" } ], "gssproxy": [ { "arch": "x86_64", "epoch": null, "name": "gssproxy", "release": "10.el10", "source": "rpm", "version": "0.9.2" } ], "gzip": [ { "arch": "x86_64", "epoch": null, "name": "gzip", "release": "3.el10", "source": "rpm", "version": "1.13" } ], "hostname": [ { "arch": "x86_64", "epoch": null, "name": "hostname", "release": "14.el10", "source": "rpm", "version": "3.23" } ], "hunspell": [ { "arch": "x86_64", "epoch": null, "name": "hunspell", "release": "9.el10", "source": "rpm", "version": "1.7.2" } ], "hunspell-en": [ { "arch": "noarch", "epoch": null, "name": "hunspell-en", "release": "11.el10", "source": "rpm", "version": "0.20201207" } ], "hunspell-en-GB": [ { "arch": "noarch", "epoch": null, "name": "hunspell-en-GB", "release": "11.el10", "source": "rpm", "version": "0.20201207" } ], "hunspell-en-US": [ { "arch": "noarch", "epoch": null, "name": "hunspell-en-US", "release": "11.el10", "source": "rpm", "version": "0.20201207" } ], "hunspell-filesystem": [ { "arch": "x86_64", "epoch": null, "name": "hunspell-filesystem", "release": "9.el10", "source": "rpm", "version": "1.7.2" } ], "hwdata": [ { "arch": "noarch", "epoch": null, "name": "hwdata", "release": "10.8.el10", "source": "rpm", "version": "0.379" } ], "ima-evm-utils": [ { "arch": "x86_64", "epoch": null, "name": "ima-evm-utils", "release": "4.el10", "source": "rpm", "version": "1.6.2" } ], "inih": [ { "arch": "x86_64", "epoch": null, "name": "inih", "release": "3.el10", "source": "rpm", "version": "58" } ], "initscripts-rename-device": [ { "arch": "x86_64", "epoch": null, "name": "initscripts-rename-device", "release": "2.el10", "source": "rpm", "version": "10.26" } ], "initscripts-service": [ { "arch": "noarch", "epoch": null, "name": "initscripts-service", "release": "2.el10", "source": "rpm", "version": "10.26" } ], "intel-audio-firmware": [ { "arch": "noarch", "epoch": null, "name": "intel-audio-firmware", "release": "22.el10", "source": "rpm", "version": "20260130" } ], "intel-gpu-firmware": [ { "arch": "noarch", "epoch": null, "name": "intel-gpu-firmware", "release": "22.el10", "source": "rpm", "version": "20260130" } ], "iproute": [ { "arch": "x86_64", 
"epoch": null, "name": "iproute", "release": "2.el10", "source": "rpm", "version": "6.17.0" } ], "iproute-tc": [ { "arch": "x86_64", "epoch": null, "name": "iproute-tc", "release": "2.el10", "source": "rpm", "version": "6.17.0" } ], "ipset": [ { "arch": "x86_64", "epoch": null, "name": "ipset", "release": "9.el10", "source": "rpm", "version": "7.22" } ], "ipset-libs": [ { "arch": "x86_64", "epoch": null, "name": "ipset-libs", "release": "9.el10", "source": "rpm", "version": "7.22" } ], "iptables-libs": [ { "arch": "x86_64", "epoch": null, "name": "iptables-libs", "release": "12.el10", "source": "rpm", "version": "1.8.11" } ], "iptables-nft": [ { "arch": "x86_64", "epoch": null, "name": "iptables-nft", "release": "12.el10", "source": "rpm", "version": "1.8.11" } ], "iputils": [ { "arch": "x86_64", "epoch": null, "name": "iputils", "release": "5.el10", "source": "rpm", "version": "20240905" } ], "irqbalance": [ { "arch": "x86_64", "epoch": 2, "name": "irqbalance", "release": "6.el10", "source": "rpm", "version": "1.9.4" } ], "iwlwifi-dvm-firmware": [ { "arch": "noarch", "epoch": null, "name": "iwlwifi-dvm-firmware", "release": "22.el10", "source": "rpm", "version": "20260130" } ], "iwlwifi-mvm-firmware": [ { "arch": "noarch", "epoch": null, "name": "iwlwifi-mvm-firmware", "release": "22.el10", "source": "rpm", "version": "20260130" } ], "jansson": [ { "arch": "x86_64", "epoch": null, "name": "jansson", "release": "3.el10", "source": "rpm", "version": "2.14" } ], "jitterentropy": [ { "arch": "x86_64", "epoch": null, "name": "jitterentropy", "release": "2.el10", "source": "rpm", "version": "3.6.0" } ], "jose": [ { "arch": "x86_64", "epoch": null, "name": "jose", "release": "102.el10", "source": "rpm", "version": "14" } ], "jq": [ { "arch": "x86_64", "epoch": null, "name": "jq", "release": "11.el10", "source": "rpm", "version": "1.7.1" } ], "json-c": [ { "arch": "x86_64", "epoch": null, "name": "json-c", "release": "3.el10", "source": "rpm", "version": "0.18" } ], "kbd": [ { "arch": "x86_64", "epoch": null, "name": "kbd", "release": "7.el10", "source": "rpm", "version": "2.6.4" } ], "kbd-legacy": [ { "arch": "noarch", "epoch": null, "name": "kbd-legacy", "release": "7.el10", "source": "rpm", "version": "2.6.4" } ], "kbd-misc": [ { "arch": "noarch", "epoch": null, "name": "kbd-misc", "release": "7.el10", "source": "rpm", "version": "2.6.4" } ], "kdump-utils": [ { "arch": "x86_64", "epoch": null, "name": "kdump-utils", "release": "15.el10", "source": "rpm", "version": "1.0.58" } ], "kernel": [ { "arch": "x86_64", "epoch": null, "name": "kernel", "release": "214.el10", "source": "rpm", "version": "6.12.0" } ], "kernel-core": [ { "arch": "x86_64", "epoch": null, "name": "kernel-core", "release": "214.el10", "source": "rpm", "version": "6.12.0" } ], "kernel-devel": [ { "arch": "x86_64", "epoch": null, "name": "kernel-devel", "release": "214.el10", "source": "rpm", "version": "6.12.0" } ], "kernel-headers": [ { "arch": "x86_64", "epoch": null, "name": "kernel-headers", "release": "214.el10", "source": "rpm", "version": "6.12.0" } ], "kernel-modules": [ { "arch": "x86_64", "epoch": null, "name": "kernel-modules", "release": "214.el10", "source": "rpm", "version": "6.12.0" } ], "kernel-modules-core": [ { "arch": "x86_64", "epoch": null, "name": "kernel-modules-core", "release": "214.el10", "source": "rpm", "version": "6.12.0" } ], "kernel-modules-extra": [ { "arch": "x86_64", "epoch": null, "name": "kernel-modules-extra", "release": "214.el10", "source": "rpm", "version": "6.12.0" } ], "kernel-tools": 
[ { "arch": "x86_64", "epoch": null, "name": "kernel-tools", "release": "214.el10", "source": "rpm", "version": "6.12.0" } ], "kernel-tools-libs": [ { "arch": "x86_64", "epoch": null, "name": "kernel-tools-libs", "release": "214.el10", "source": "rpm", "version": "6.12.0" } ], "kexec-tools": [ { "arch": "x86_64", "epoch": null, "name": "kexec-tools", "release": "3.el10", "source": "rpm", "version": "2.0.32" } ], "keyutils": [ { "arch": "x86_64", "epoch": null, "name": "keyutils", "release": "5.el10", "source": "rpm", "version": "1.6.3" } ], "keyutils-libs": [ { "arch": "x86_64", "epoch": null, "name": "keyutils-libs", "release": "5.el10", "source": "rpm", "version": "1.6.3" } ], "kmod": [ { "arch": "x86_64", "epoch": null, "name": "kmod", "release": "13.el10", "source": "rpm", "version": "31" } ], "kmod-libs": [ { "arch": "x86_64", "epoch": null, "name": "kmod-libs", "release": "13.el10", "source": "rpm", "version": "31" } ], "kpartx": [ { "arch": "x86_64", "epoch": null, "name": "kpartx", "release": "18.el10", "source": "rpm", "version": "0.9.9" } ], "krb5-libs": [ { "arch": "x86_64", "epoch": null, "name": "krb5-libs", "release": "9.el10", "source": "rpm", "version": "1.21.3" } ], "langpacks-core-en": [ { "arch": "noarch", "epoch": null, "name": "langpacks-core-en", "release": "3.el10", "source": "rpm", "version": "4.1" } ], "langpacks-en": [ { "arch": "noarch", "epoch": null, "name": "langpacks-en", "release": "3.el10", "source": "rpm", "version": "4.1" } ], "langpacks-fonts-en": [ { "arch": "noarch", "epoch": null, "name": "langpacks-fonts-en", "release": "3.el10", "source": "rpm", "version": "4.1" } ], "less": [ { "arch": "x86_64", "epoch": null, "name": "less", "release": "3.el10", "source": "rpm", "version": "661" } ], "libacl": [ { "arch": "x86_64", "epoch": null, "name": "libacl", "release": "4.el10", "source": "rpm", "version": "2.3.2" } ], "libaio": [ { "arch": "x86_64", "epoch": null, "name": "libaio", "release": "22.el10", "source": "rpm", "version": "0.3.111" } ], "libarchive": [ { "arch": "x86_64", "epoch": null, "name": "libarchive", "release": "4.el10", "source": "rpm", "version": "3.7.7" } ], "libattr": [ { "arch": "x86_64", "epoch": null, "name": "libattr", "release": "5.el10", "source": "rpm", "version": "2.5.2" } ], "libbasicobjects": [ { "arch": "x86_64", "epoch": null, "name": "libbasicobjects", "release": "58.el10", "source": "rpm", "version": "0.1.1" } ], "libblkid": [ { "arch": "x86_64", "epoch": null, "name": "libblkid", "release": "19.el10", "source": "rpm", "version": "2.40.2" } ], "libblockdev": [ { "arch": "x86_64", "epoch": null, "name": "libblockdev", "release": "2.el10", "source": "rpm", "version": "3.4.0" } ], "libblockdev-crypto": [ { "arch": "x86_64", "epoch": null, "name": "libblockdev-crypto", "release": "2.el10", "source": "rpm", "version": "3.4.0" } ], "libblockdev-dm": [ { "arch": "x86_64", "epoch": null, "name": "libblockdev-dm", "release": "2.el10", "source": "rpm", "version": "3.4.0" } ], "libblockdev-fs": [ { "arch": "x86_64", "epoch": null, "name": "libblockdev-fs", "release": "2.el10", "source": "rpm", "version": "3.4.0" } ], "libblockdev-loop": [ { "arch": "x86_64", "epoch": null, "name": "libblockdev-loop", "release": "2.el10", "source": "rpm", "version": "3.4.0" } ], "libblockdev-lvm": [ { "arch": "x86_64", "epoch": null, "name": "libblockdev-lvm", "release": "2.el10", "source": "rpm", "version": "3.4.0" } ], "libblockdev-mdraid": [ { "arch": "x86_64", "epoch": null, "name": "libblockdev-mdraid", "release": "2.el10", "source": "rpm", 
"version": "3.4.0" } ], "libblockdev-mpath": [ { "arch": "x86_64", "epoch": null, "name": "libblockdev-mpath", "release": "2.el10", "source": "rpm", "version": "3.4.0" } ], "libblockdev-nvme": [ { "arch": "x86_64", "epoch": null, "name": "libblockdev-nvme", "release": "2.el10", "source": "rpm", "version": "3.4.0" } ], "libblockdev-part": [ { "arch": "x86_64", "epoch": null, "name": "libblockdev-part", "release": "2.el10", "source": "rpm", "version": "3.4.0" } ], "libblockdev-swap": [ { "arch": "x86_64", "epoch": null, "name": "libblockdev-swap", "release": "2.el10", "source": "rpm", "version": "3.4.0" } ], "libblockdev-utils": [ { "arch": "x86_64", "epoch": null, "name": "libblockdev-utils", "release": "2.el10", "source": "rpm", "version": "3.4.0" } ], "libbpf": [ { "arch": "x86_64", "epoch": 2, "name": "libbpf", "release": "1.el10", "source": "rpm", "version": "1.7.0" } ], "libbrotli": [ { "arch": "x86_64", "epoch": null, "name": "libbrotli", "release": "7.el10", "source": "rpm", "version": "1.1.0" } ], "libbytesize": [ { "arch": "x86_64", "epoch": null, "name": "libbytesize", "release": "5.el10", "source": "rpm", "version": "2.10" } ], "libcap": [ { "arch": "x86_64", "epoch": null, "name": "libcap", "release": "7.el10", "source": "rpm", "version": "2.69" } ], "libcap-ng": [ { "arch": "x86_64", "epoch": null, "name": "libcap-ng", "release": "6.el10", "source": "rpm", "version": "0.8.4" } ], "libcap-ng-python3": [ { "arch": "x86_64", "epoch": null, "name": "libcap-ng-python3", "release": "6.el10", "source": "rpm", "version": "0.8.4" } ], "libcbor": [ { "arch": "x86_64", "epoch": null, "name": "libcbor", "release": "3.el10", "source": "rpm", "version": "0.11.0" } ], "libcollection": [ { "arch": "x86_64", "epoch": null, "name": "libcollection", "release": "58.el10", "source": "rpm", "version": "0.7.0" } ], "libcom_err": [ { "arch": "x86_64", "epoch": null, "name": "libcom_err", "release": "5.el10", "source": "rpm", "version": "1.47.1" } ], "libcomps": [ { "arch": "x86_64", "epoch": null, "name": "libcomps", "release": "3.el10", "source": "rpm", "version": "0.1.21" } ], "libcurl": [ { "arch": "x86_64", "epoch": null, "name": "libcurl", "release": "4.el10", "source": "rpm", "version": "8.12.1" } ], "libdhash": [ { "arch": "x86_64", "epoch": null, "name": "libdhash", "release": "58.el10", "source": "rpm", "version": "0.5.0" } ], "libdnf": [ { "arch": "x86_64", "epoch": null, "name": "libdnf", "release": "14.el10", "source": "rpm", "version": "0.73.1" } ], "libeconf": [ { "arch": "x86_64", "epoch": null, "name": "libeconf", "release": "4.el10", "source": "rpm", "version": "0.6.2" } ], "libedit": [ { "arch": "x86_64", "epoch": null, "name": "libedit", "release": "52.20230828cvs.el10", "source": "rpm", "version": "3.1" } ], "libestr": [ { "arch": "x86_64", "epoch": null, "name": "libestr", "release": "11.el10", "source": "rpm", "version": "0.1.11" } ], "libev": [ { "arch": "x86_64", "epoch": null, "name": "libev", "release": "15.el10", "source": "rpm", "version": "4.33" } ], "libevent": [ { "arch": "x86_64", "epoch": null, "name": "libevent", "release": "16.el10", "source": "rpm", "version": "2.1.12" } ], "libfastjson": [ { "arch": "x86_64", "epoch": null, "name": "libfastjson", "release": "6.el10", "source": "rpm", "version": "1.2304.0" } ], "libfdisk": [ { "arch": "x86_64", "epoch": null, "name": "libfdisk", "release": "19.el10", "source": "rpm", "version": "2.40.2" } ], "libffi": [ { "arch": "x86_64", "epoch": null, "name": "libffi", "release": "10.el10", "source": "rpm", "version": "3.4.4" } 
], "libfido2": [ { "arch": "x86_64", "epoch": null, "name": "libfido2", "release": "7.el10", "source": "rpm", "version": "1.14.0" } ], "libgcc": [ { "arch": "x86_64", "epoch": null, "name": "libgcc", "release": "4.4.el10", "source": "rpm", "version": "14.3.1" } ], "libgomp": [ { "arch": "x86_64", "epoch": null, "name": "libgomp", "release": "4.4.el10", "source": "rpm", "version": "14.3.1" } ], "libidn2": [ { "arch": "x86_64", "epoch": null, "name": "libidn2", "release": "3.el10", "source": "rpm", "version": "2.3.7" } ], "libini_config": [ { "arch": "x86_64", "epoch": null, "name": "libini_config", "release": "58.el10", "source": "rpm", "version": "1.3.1" } ], "libjose": [ { "arch": "x86_64", "epoch": null, "name": "libjose", "release": "102.el10", "source": "rpm", "version": "14" } ], "libkcapi": [ { "arch": "x86_64", "epoch": null, "name": "libkcapi", "release": "3.el10", "source": "rpm", "version": "1.5.0" } ], "libkcapi-hasher": [ { "arch": "x86_64", "epoch": null, "name": "libkcapi-hasher", "release": "3.el10", "source": "rpm", "version": "1.5.0" } ], "libkcapi-hmaccalc": [ { "arch": "x86_64", "epoch": null, "name": "libkcapi-hmaccalc", "release": "3.el10", "source": "rpm", "version": "1.5.0" } ], "libldb": [ { "arch": "x86_64", "epoch": 0, "name": "libldb", "release": "105.el10", "source": "rpm", "version": "4.23.5" } ], "libluksmeta": [ { "arch": "x86_64", "epoch": null, "name": "libluksmeta", "release": "1.el10", "source": "rpm", "version": "10" } ], "libmnl": [ { "arch": "x86_64", "epoch": null, "name": "libmnl", "release": "7.el10", "source": "rpm", "version": "1.0.5" } ], "libmodulemd": [ { "arch": "x86_64", "epoch": null, "name": "libmodulemd", "release": "12.el10", "source": "rpm", "version": "2.15.0" } ], "libmount": [ { "arch": "x86_64", "epoch": null, "name": "libmount", "release": "19.el10", "source": "rpm", "version": "2.40.2" } ], "libmpc": [ { "arch": "x86_64", "epoch": null, "name": "libmpc", "release": "7.el10", "source": "rpm", "version": "1.3.1" } ], "libndp": [ { "arch": "x86_64", "epoch": null, "name": "libndp", "release": "2.el10", "source": "rpm", "version": "1.9" } ], "libnetfilter_conntrack": [ { "arch": "x86_64", "epoch": null, "name": "libnetfilter_conntrack", "release": "12.el10", "source": "rpm", "version": "1.0.9" } ], "libnfnetlink": [ { "arch": "x86_64", "epoch": null, "name": "libnfnetlink", "release": "3.el10", "source": "rpm", "version": "1.0.2" } ], "libnfsidmap": [ { "arch": "x86_64", "epoch": 1, "name": "libnfsidmap", "release": "5.el10", "source": "rpm", "version": "2.8.3" } ], "libnftnl": [ { "arch": "x86_64", "epoch": null, "name": "libnftnl", "release": "3.el10", "source": "rpm", "version": "1.3.0" } ], "libnghttp2": [ { "arch": "x86_64", "epoch": null, "name": "libnghttp2", "release": "3.el10", "source": "rpm", "version": "1.68.0" } ], "libnl3": [ { "arch": "x86_64", "epoch": null, "name": "libnl3", "release": "1.el10", "source": "rpm", "version": "3.11.0" } ], "libnvme": [ { "arch": "x86_64", "epoch": null, "name": "libnvme", "release": "2.el10", "source": "rpm", "version": "1.16.1" } ], "libpath_utils": [ { "arch": "x86_64", "epoch": null, "name": "libpath_utils", "release": "58.el10", "source": "rpm", "version": "0.2.1" } ], "libpipeline": [ { "arch": "x86_64", "epoch": null, "name": "libpipeline", "release": "7.el10", "source": "rpm", "version": "1.5.7" } ], "libpkgconf": [ { "arch": "x86_64", "epoch": null, "name": "libpkgconf", "release": "3.el10", "source": "rpm", "version": "2.1.0" } ], "libpsl": [ { "arch": "x86_64", "epoch": null, 
"name": "libpsl", "release": "6.el10", "source": "rpm", "version": "0.21.5" } ], "libpwquality": [ { "arch": "x86_64", "epoch": null, "name": "libpwquality", "release": "12.el10", "source": "rpm", "version": "1.4.5" } ], "libref_array": [ { "arch": "x86_64", "epoch": null, "name": "libref_array", "release": "58.el10", "source": "rpm", "version": "0.1.5" } ], "librepo": [ { "arch": "x86_64", "epoch": null, "name": "librepo", "release": "1.el10", "source": "rpm", "version": "1.19.0" } ], "libseccomp": [ { "arch": "x86_64", "epoch": null, "name": "libseccomp", "release": "1.el10", "source": "rpm", "version": "2.5.6" } ], "libselinux": [ { "arch": "x86_64", "epoch": null, "name": "libselinux", "release": "1.el10", "source": "rpm", "version": "3.10" } ], "libselinux-utils": [ { "arch": "x86_64", "epoch": null, "name": "libselinux-utils", "release": "1.el10", "source": "rpm", "version": "3.10" } ], "libsemanage": [ { "arch": "x86_64", "epoch": null, "name": "libsemanage", "release": "1.el10", "source": "rpm", "version": "3.10" } ], "libsepol": [ { "arch": "x86_64", "epoch": null, "name": "libsepol", "release": "1.el10", "source": "rpm", "version": "3.10" } ], "libsmartcols": [ { "arch": "x86_64", "epoch": null, "name": "libsmartcols", "release": "19.el10", "source": "rpm", "version": "2.40.2" } ], "libsolv": [ { "arch": "x86_64", "epoch": null, "name": "libsolv", "release": "2.el10", "source": "rpm", "version": "0.7.33" } ], "libss": [ { "arch": "x86_64", "epoch": null, "name": "libss", "release": "5.el10", "source": "rpm", "version": "1.47.1" } ], "libssh": [ { "arch": "x86_64", "epoch": null, "name": "libssh", "release": "2.el10", "source": "rpm", "version": "0.12.0" } ], "libssh-config": [ { "arch": "noarch", "epoch": null, "name": "libssh-config", "release": "2.el10", "source": "rpm", "version": "0.12.0" } ], "libsss_certmap": [ { "arch": "x86_64", "epoch": null, "name": "libsss_certmap", "release": "1.el10", "source": "rpm", "version": "2.12.0" } ], "libsss_idmap": [ { "arch": "x86_64", "epoch": null, "name": "libsss_idmap", "release": "1.el10", "source": "rpm", "version": "2.12.0" } ], "libsss_nss_idmap": [ { "arch": "x86_64", "epoch": null, "name": "libsss_nss_idmap", "release": "1.el10", "source": "rpm", "version": "2.12.0" } ], "libsss_sudo": [ { "arch": "x86_64", "epoch": null, "name": "libsss_sudo", "release": "1.el10", "source": "rpm", "version": "2.12.0" } ], "libstdc++": [ { "arch": "x86_64", "epoch": null, "name": "libstdc++", "release": "4.4.el10", "source": "rpm", "version": "14.3.1" } ], "libsysfs": [ { "arch": "x86_64", "epoch": null, "name": "libsysfs", "release": "15.el10", "source": "rpm", "version": "2.1.1" } ], "libtalloc": [ { "arch": "x86_64", "epoch": null, "name": "libtalloc", "release": "100.el10", "source": "rpm", "version": "2.4.3" } ], "libtasn1": [ { "arch": "x86_64", "epoch": null, "name": "libtasn1", "release": "1.el10", "source": "rpm", "version": "4.20.0" } ], "libtdb": [ { "arch": "x86_64", "epoch": null, "name": "libtdb", "release": "100.el10", "source": "rpm", "version": "1.4.14" } ], "libtevent": [ { "arch": "x86_64", "epoch": null, "name": "libtevent", "release": "100.el10", "source": "rpm", "version": "0.17.1" } ], "libtirpc": [ { "arch": "x86_64", "epoch": null, "name": "libtirpc", "release": "1.el10", "source": "rpm", "version": "1.3.5" } ], "libunistring": [ { "arch": "x86_64", "epoch": null, "name": "libunistring", "release": "10.el10", "source": "rpm", "version": "1.1" } ], "liburing": [ { "arch": "x86_64", "epoch": null, "name": "liburing", 
"release": "1.el10", "source": "rpm", "version": "2.12" } ], "libutempter": [ { "arch": "x86_64", "epoch": null, "name": "libutempter", "release": "15.el10", "source": "rpm", "version": "1.2.1" } ], "libuuid": [ { "arch": "x86_64", "epoch": null, "name": "libuuid", "release": "19.el10", "source": "rpm", "version": "2.40.2" } ], "libverto": [ { "arch": "x86_64", "epoch": null, "name": "libverto", "release": "10.el10", "source": "rpm", "version": "0.3.2" } ], "libverto-libev": [ { "arch": "x86_64", "epoch": null, "name": "libverto-libev", "release": "10.el10", "source": "rpm", "version": "0.3.2" } ], "libxcrypt": [ { "arch": "x86_64", "epoch": null, "name": "libxcrypt", "release": "10.el10", "source": "rpm", "version": "4.4.36" } ], "libxcrypt-devel": [ { "arch": "x86_64", "epoch": null, "name": "libxcrypt-devel", "release": "10.el10", "source": "rpm", "version": "4.4.36" } ], "libxkbcommon": [ { "arch": "x86_64", "epoch": null, "name": "libxkbcommon", "release": "4.el10", "source": "rpm", "version": "1.7.0" } ], "libxml2": [ { "arch": "x86_64", "epoch": null, "name": "libxml2", "release": "10.el10", "source": "rpm", "version": "2.12.5" } ], "libxslt": [ { "arch": "x86_64", "epoch": null, "name": "libxslt", "release": "8.el10", "source": "rpm", "version": "1.1.39" } ], "libyaml": [ { "arch": "x86_64", "epoch": null, "name": "libyaml", "release": "16.el10", "source": "rpm", "version": "0.2.5" } ], "libzstd": [ { "arch": "x86_64", "epoch": null, "name": "libzstd", "release": "9.el10", "source": "rpm", "version": "1.5.5" } ], "libzstd-devel": [ { "arch": "x86_64", "epoch": null, "name": "libzstd-devel", "release": "9.el10", "source": "rpm", "version": "1.5.5" } ], "linux-firmware": [ { "arch": "noarch", "epoch": null, "name": "linux-firmware", "release": "22.el10", "source": "rpm", "version": "20260130" } ], "linux-firmware-whence": [ { "arch": "noarch", "epoch": null, "name": "linux-firmware-whence", "release": "22.el10", "source": "rpm", "version": "20260130" } ], "lmdb-libs": [ { "arch": "x86_64", "epoch": null, "name": "lmdb-libs", "release": "4.el10", "source": "rpm", "version": "0.9.32" } ], "logrotate": [ { "arch": "x86_64", "epoch": null, "name": "logrotate", "release": "5.el10", "source": "rpm", "version": "3.22.0" } ], "lshw": [ { "arch": "x86_64", "epoch": null, "name": "lshw", "release": "9.el10", "source": "rpm", "version": "B.02.20" } ], "lsof": [ { "arch": "x86_64", "epoch": null, "name": "lsof", "release": "7.el10", "source": "rpm", "version": "4.98.0" } ], "lsscsi": [ { "arch": "x86_64", "epoch": null, "name": "lsscsi", "release": "15.el10", "source": "rpm", "version": "0.32" } ], "lua-libs": [ { "arch": "x86_64", "epoch": null, "name": "lua-libs", "release": "7.el10", "source": "rpm", "version": "5.4.6" } ], "luksmeta": [ { "arch": "x86_64", "epoch": null, "name": "luksmeta", "release": "1.el10", "source": "rpm", "version": "10" } ], "lvm2": [ { "arch": "x86_64", "epoch": 10, "name": "lvm2", "release": "2.el10", "source": "rpm", "version": "2.03.36" } ], "lvm2-libs": [ { "arch": "x86_64", "epoch": 10, "name": "lvm2-libs", "release": "2.el10", "source": "rpm", "version": "2.03.36" } ], "lz4-libs": [ { "arch": "x86_64", "epoch": null, "name": "lz4-libs", "release": "8.el10", "source": "rpm", "version": "1.9.4" } ], "lzo": [ { "arch": "x86_64", "epoch": null, "name": "lzo", "release": "14.el10", "source": "rpm", "version": "2.10" } ], "m4": [ { "arch": "x86_64", "epoch": null, "name": "m4", "release": "11.el10", "source": "rpm", "version": "1.4.19" } ], "make": [ { "arch": 
"x86_64", "epoch": 1, "name": "make", "release": "9.el10", "source": "rpm", "version": "4.4.1" } ], "makedumpfile": [ { "arch": "x86_64", "epoch": null, "name": "makedumpfile", "release": "1.el10", "source": "rpm", "version": "1.7.8" } ], "man-db": [ { "arch": "x86_64", "epoch": null, "name": "man-db", "release": "10.el10", "source": "rpm", "version": "2.12.0" } ], "mdadm": [ { "arch": "x86_64", "epoch": null, "name": "mdadm", "release": "3.el10", "source": "rpm", "version": "4.4" } ], "memstrack": [ { "arch": "x86_64", "epoch": null, "name": "memstrack", "release": "6.el10", "source": "rpm", "version": "0.2.5" } ], "microcode_ctl": [ { "arch": "noarch", "epoch": 4, "name": "microcode_ctl", "release": "1.el10", "source": "rpm", "version": "20260210" } ], "mokutil": [ { "arch": "x86_64", "epoch": 2, "name": "mokutil", "release": "4.el10", "source": "rpm", "version": "0.7.2" } ], "mpdecimal": [ { "arch": "x86_64", "epoch": null, "name": "mpdecimal", "release": "12.el10", "source": "rpm", "version": "2.5.1" } ], "mpfr": [ { "arch": "x86_64", "epoch": null, "name": "mpfr", "release": "8.el10", "source": "rpm", "version": "4.2.1" } ], "mt7xxx-firmware": [ { "arch": "noarch", "epoch": null, "name": "mt7xxx-firmware", "release": "22.el10", "source": "rpm", "version": "20260130" } ], "ncurses": [ { "arch": "x86_64", "epoch": null, "name": "ncurses", "release": "14.20240127.el10", "source": "rpm", "version": "6.4" } ], "ncurses-base": [ { "arch": "noarch", "epoch": null, "name": "ncurses-base", "release": "14.20240127.el10", "source": "rpm", "version": "6.4" } ], "ncurses-libs": [ { "arch": "x86_64", "epoch": null, "name": "ncurses-libs", "release": "14.20240127.el10", "source": "rpm", "version": "6.4" } ], "newt": [ { "arch": "x86_64", "epoch": null, "name": "newt", "release": "5.el10", "source": "rpm", "version": "0.52.24" } ], "nfs-utils": [ { "arch": "x86_64", "epoch": 1, "name": "nfs-utils", "release": "5.el10", "source": "rpm", "version": "2.8.3" } ], "nftables": [ { "arch": "x86_64", "epoch": 1, "name": "nftables", "release": "3.el10", "source": "rpm", "version": "1.1.5" } ], "nspr": [ { "arch": "x86_64", "epoch": null, "name": "nspr", "release": "8.el10", "source": "rpm", "version": "4.36.0" } ], "nss": [ { "arch": "x86_64", "epoch": null, "name": "nss", "release": "8.el10", "source": "rpm", "version": "3.112.0" } ], "nss-softokn": [ { "arch": "x86_64", "epoch": null, "name": "nss-softokn", "release": "8.el10", "source": "rpm", "version": "3.112.0" } ], "nss-softokn-freebl": [ { "arch": "x86_64", "epoch": null, "name": "nss-softokn-freebl", "release": "8.el10", "source": "rpm", "version": "3.112.0" } ], "nss-sysinit": [ { "arch": "x86_64", "epoch": null, "name": "nss-sysinit", "release": "8.el10", "source": "rpm", "version": "3.112.0" } ], "nss-util": [ { "arch": "x86_64", "epoch": null, "name": "nss-util", "release": "8.el10", "source": "rpm", "version": "3.112.0" } ], "numactl-libs": [ { "arch": "x86_64", "epoch": null, "name": "numactl-libs", "release": "3.el10", "source": "rpm", "version": "2.0.19" } ], "nvidia-gpu-firmware": [ { "arch": "noarch", "epoch": null, "name": "nvidia-gpu-firmware", "release": "22.el10", "source": "rpm", "version": "20260130" } ], "nxpwireless-firmware": [ { "arch": "noarch", "epoch": null, "name": "nxpwireless-firmware", "release": "22.el10", "source": "rpm", "version": "20260130" } ], "oniguruma": [ { "arch": "x86_64", "epoch": null, "name": "oniguruma", "release": "7.el10", "source": "rpm", "version": "6.9.9" } ], "openldap": [ { "arch": "x86_64", "epoch": 
null, "name": "openldap", "release": "1.el10", "source": "rpm", "version": "2.6.10" } ], "openssh": [ { "arch": "x86_64", "epoch": null, "name": "openssh", "release": "23.el10", "source": "rpm", "version": "9.9p1" } ], "openssh-clients": [ { "arch": "x86_64", "epoch": null, "name": "openssh-clients", "release": "23.el10", "source": "rpm", "version": "9.9p1" } ], "openssh-server": [ { "arch": "x86_64", "epoch": null, "name": "openssh-server", "release": "23.el10", "source": "rpm", "version": "9.9p1" } ], "openssl": [ { "arch": "x86_64", "epoch": 1, "name": "openssl", "release": "1.el10", "source": "rpm", "version": "3.5.5" } ], "openssl-devel": [ { "arch": "x86_64", "epoch": 1, "name": "openssl-devel", "release": "1.el10", "source": "rpm", "version": "3.5.5" } ], "openssl-fips-provider": [ { "arch": "x86_64", "epoch": 1, "name": "openssl-fips-provider", "release": "1.el10", "source": "rpm", "version": "3.5.5" } ], "openssl-libs": [ { "arch": "x86_64", "epoch": 1, "name": "openssl-libs", "release": "1.el10", "source": "rpm", "version": "3.5.5" } ], "os-prober": [ { "arch": "x86_64", "epoch": null, "name": "os-prober", "release": "9.el10", "source": "rpm", "version": "1.81" } ], "p11-kit": [ { "arch": "x86_64", "epoch": null, "name": "p11-kit", "release": "1.el10", "source": "rpm", "version": "0.26.2" } ], "p11-kit-trust": [ { "arch": "x86_64", "epoch": null, "name": "p11-kit-trust", "release": "1.el10", "source": "rpm", "version": "0.26.2" } ], "pam": [ { "arch": "x86_64", "epoch": null, "name": "pam", "release": "9.el10", "source": "rpm", "version": "1.6.1" } ], "pam-libs": [ { "arch": "x86_64", "epoch": null, "name": "pam-libs", "release": "9.el10", "source": "rpm", "version": "1.6.1" } ], "parted": [ { "arch": "x86_64", "epoch": null, "name": "parted", "release": "7.el10", "source": "rpm", "version": "3.6" } ], "pciutils-libs": [ { "arch": "x86_64", "epoch": null, "name": "pciutils-libs", "release": "6.el10", "source": "rpm", "version": "3.13.0" } ], "pcre2": [ { "arch": "x86_64", "epoch": null, "name": "pcre2", "release": "1.el10.3", "source": "rpm", "version": "10.44" } ], "pcre2-syntax": [ { "arch": "noarch", "epoch": null, "name": "pcre2-syntax", "release": "1.el10.3", "source": "rpm", "version": "10.44" } ], "perl-AutoLoader": [ { "arch": "noarch", "epoch": 0, "name": "perl-AutoLoader", "release": "514.el10", "source": "rpm", "version": "5.74" } ], "perl-B": [ { "arch": "x86_64", "epoch": 0, "name": "perl-B", "release": "514.el10", "source": "rpm", "version": "1.89" } ], "perl-Carp": [ { "arch": "noarch", "epoch": null, "name": "perl-Carp", "release": "511.el10", "source": "rpm", "version": "1.54" } ], "perl-Class-Struct": [ { "arch": "noarch", "epoch": 0, "name": "perl-Class-Struct", "release": "514.el10", "source": "rpm", "version": "0.68" } ], "perl-Data-Dumper": [ { "arch": "x86_64", "epoch": null, "name": "perl-Data-Dumper", "release": "512.el10", "source": "rpm", "version": "2.189" } ], "perl-Digest": [ { "arch": "noarch", "epoch": null, "name": "perl-Digest", "release": "511.el10", "source": "rpm", "version": "1.20" } ], "perl-Digest-MD5": [ { "arch": "x86_64", "epoch": null, "name": "perl-Digest-MD5", "release": "6.el10", "source": "rpm", "version": "2.59" } ], "perl-DynaLoader": [ { "arch": "x86_64", "epoch": 0, "name": "perl-DynaLoader", "release": "514.el10", "source": "rpm", "version": "1.56" } ], "perl-Encode": [ { "arch": "x86_64", "epoch": 4, "name": "perl-Encode", "release": "511.el10", "source": "rpm", "version": "3.21" } ], "perl-Errno": [ { "arch": "x86_64", 
"epoch": 0, "name": "perl-Errno", "release": "514.el10", "source": "rpm", "version": "1.38" } ], "perl-Error": [ { "arch": "noarch", "epoch": 1, "name": "perl-Error", "release": "18.el10", "source": "rpm", "version": "0.17029" } ], "perl-Exporter": [ { "arch": "noarch", "epoch": null, "name": "perl-Exporter", "release": "511.el10", "source": "rpm", "version": "5.78" } ], "perl-Fcntl": [ { "arch": "x86_64", "epoch": 0, "name": "perl-Fcntl", "release": "514.el10", "source": "rpm", "version": "1.18" } ], "perl-File-Basename": [ { "arch": "noarch", "epoch": 0, "name": "perl-File-Basename", "release": "514.el10", "source": "rpm", "version": "2.86" } ], "perl-File-Path": [ { "arch": "noarch", "epoch": null, "name": "perl-File-Path", "release": "512.el10", "source": "rpm", "version": "2.18" } ], "perl-File-Temp": [ { "arch": "noarch", "epoch": 1, "name": "perl-File-Temp", "release": "512.el10", "source": "rpm", "version": "0.231.100" } ], "perl-File-stat": [ { "arch": "noarch", "epoch": 0, "name": "perl-File-stat", "release": "514.el10", "source": "rpm", "version": "1.14" } ], "perl-FileHandle": [ { "arch": "noarch", "epoch": 0, "name": "perl-FileHandle", "release": "514.el10", "source": "rpm", "version": "2.05" } ], "perl-Getopt-Long": [ { "arch": "noarch", "epoch": 1, "name": "perl-Getopt-Long", "release": "3.el10", "source": "rpm", "version": "2.58" } ], "perl-Getopt-Std": [ { "arch": "noarch", "epoch": 0, "name": "perl-Getopt-Std", "release": "514.el10", "source": "rpm", "version": "1.14" } ], "perl-Git": [ { "arch": "noarch", "epoch": null, "name": "perl-Git", "release": "1.el10", "source": "rpm", "version": "2.52.0" } ], "perl-HTTP-Tiny": [ { "arch": "noarch", "epoch": null, "name": "perl-HTTP-Tiny", "release": "512.el10", "source": "rpm", "version": "0.088" } ], "perl-IO": [ { "arch": "x86_64", "epoch": 0, "name": "perl-IO", "release": "514.el10", "source": "rpm", "version": "1.55" } ], "perl-IO-Socket-IP": [ { "arch": "noarch", "epoch": null, "name": "perl-IO-Socket-IP", "release": "512.el10", "source": "rpm", "version": "0.42" } ], "perl-IO-Socket-SSL": [ { "arch": "noarch", "epoch": null, "name": "perl-IO-Socket-SSL", "release": "3.el10", "source": "rpm", "version": "2.085" } ], "perl-IPC-Open3": [ { "arch": "noarch", "epoch": 0, "name": "perl-IPC-Open3", "release": "514.el10", "source": "rpm", "version": "1.22" } ], "perl-MIME-Base64": [ { "arch": "x86_64", "epoch": null, "name": "perl-MIME-Base64", "release": "511.el10", "source": "rpm", "version": "3.16" } ], "perl-Mozilla-CA": [ { "arch": "noarch", "epoch": null, "name": "perl-Mozilla-CA", "release": "5.el10", "source": "rpm", "version": "20231213" } ], "perl-NDBM_File": [ { "arch": "x86_64", "epoch": 0, "name": "perl-NDBM_File", "release": "514.el10", "source": "rpm", "version": "1.17" } ], "perl-Net-SSLeay": [ { "arch": "x86_64", "epoch": null, "name": "perl-Net-SSLeay", "release": "8.el10", "source": "rpm", "version": "1.94" } ], "perl-POSIX": [ { "arch": "x86_64", "epoch": 0, "name": "perl-POSIX", "release": "514.el10", "source": "rpm", "version": "2.20" } ], "perl-PathTools": [ { "arch": "x86_64", "epoch": null, "name": "perl-PathTools", "release": "512.el10", "source": "rpm", "version": "3.91" } ], "perl-Pod-Escapes": [ { "arch": "noarch", "epoch": 1, "name": "perl-Pod-Escapes", "release": "511.el10", "source": "rpm", "version": "1.07" } ], "perl-Pod-Perldoc": [ { "arch": "noarch", "epoch": null, "name": "perl-Pod-Perldoc", "release": "512.el10", "source": "rpm", "version": "3.28.01" } ], "perl-Pod-Simple": [ { "arch": 
"noarch", "epoch": 1, "name": "perl-Pod-Simple", "release": "511.el10", "source": "rpm", "version": "3.45" } ], "perl-Pod-Usage": [ { "arch": "noarch", "epoch": 4, "name": "perl-Pod-Usage", "release": "511.el10", "source": "rpm", "version": "2.03" } ], "perl-Scalar-List-Utils": [ { "arch": "x86_64", "epoch": 5, "name": "perl-Scalar-List-Utils", "release": "511.el10", "source": "rpm", "version": "1.63" } ], "perl-SelectSaver": [ { "arch": "noarch", "epoch": 0, "name": "perl-SelectSaver", "release": "514.el10", "source": "rpm", "version": "1.02" } ], "perl-Socket": [ { "arch": "x86_64", "epoch": 4, "name": "perl-Socket", "release": "511.el10", "source": "rpm", "version": "2.038" } ], "perl-Storable": [ { "arch": "x86_64", "epoch": 1, "name": "perl-Storable", "release": "511.el10", "source": "rpm", "version": "3.32" } ], "perl-Symbol": [ { "arch": "noarch", "epoch": 0, "name": "perl-Symbol", "release": "514.el10", "source": "rpm", "version": "1.09" } ], "perl-Term-ANSIColor": [ { "arch": "noarch", "epoch": null, "name": "perl-Term-ANSIColor", "release": "512.el10", "source": "rpm", "version": "5.01" } ], "perl-Term-Cap": [ { "arch": "noarch", "epoch": null, "name": "perl-Term-Cap", "release": "511.el10", "source": "rpm", "version": "1.18" } ], "perl-TermReadKey": [ { "arch": "x86_64", "epoch": null, "name": "perl-TermReadKey", "release": "24.el10", "source": "rpm", "version": "2.38" } ], "perl-Text-ParseWords": [ { "arch": "noarch", "epoch": null, "name": "perl-Text-ParseWords", "release": "511.el10", "source": "rpm", "version": "3.31" } ], "perl-Text-Tabs+Wrap": [ { "arch": "noarch", "epoch": null, "name": "perl-Text-Tabs+Wrap", "release": "511.el10", "source": "rpm", "version": "2024.001" } ], "perl-Time-Local": [ { "arch": "noarch", "epoch": 2, "name": "perl-Time-Local", "release": "511.el10", "source": "rpm", "version": "1.350" } ], "perl-URI": [ { "arch": "noarch", "epoch": null, "name": "perl-URI", "release": "3.el10", "source": "rpm", "version": "5.27" } ], "perl-base": [ { "arch": "noarch", "epoch": 0, "name": "perl-base", "release": "514.el10", "source": "rpm", "version": "2.27" } ], "perl-constant": [ { "arch": "noarch", "epoch": null, "name": "perl-constant", "release": "512.el10", "source": "rpm", "version": "1.33" } ], "perl-if": [ { "arch": "noarch", "epoch": 0, "name": "perl-if", "release": "514.el10", "source": "rpm", "version": "0.61.000" } ], "perl-interpreter": [ { "arch": "x86_64", "epoch": 4, "name": "perl-interpreter", "release": "514.el10", "source": "rpm", "version": "5.40.2" } ], "perl-lib": [ { "arch": "x86_64", "epoch": 0, "name": "perl-lib", "release": "514.el10", "source": "rpm", "version": "0.65" } ], "perl-libnet": [ { "arch": "noarch", "epoch": null, "name": "perl-libnet", "release": "512.el10", "source": "rpm", "version": "3.15" } ], "perl-libs": [ { "arch": "x86_64", "epoch": 4, "name": "perl-libs", "release": "514.el10", "source": "rpm", "version": "5.40.2" } ], "perl-locale": [ { "arch": "noarch", "epoch": 0, "name": "perl-locale", "release": "514.el10", "source": "rpm", "version": "1.12" } ], "perl-mro": [ { "arch": "x86_64", "epoch": 0, "name": "perl-mro", "release": "514.el10", "source": "rpm", "version": "1.29" } ], "perl-overload": [ { "arch": "noarch", "epoch": 0, "name": "perl-overload", "release": "514.el10", "source": "rpm", "version": "1.37" } ], "perl-overloading": [ { "arch": "noarch", "epoch": 0, "name": "perl-overloading", "release": "514.el10", "source": "rpm", "version": "0.02" } ], "perl-parent": [ { "arch": "noarch", "epoch": 1, "name": 
"perl-parent", "release": "512.el10", "source": "rpm", "version": "0.241" } ], "perl-podlators": [ { "arch": "noarch", "epoch": 1, "name": "perl-podlators", "release": "511.el10", "source": "rpm", "version": "5.01" } ], "perl-vars": [ { "arch": "noarch", "epoch": 0, "name": "perl-vars", "release": "514.el10", "source": "rpm", "version": "1.05" } ], "pigz": [ { "arch": "x86_64", "epoch": null, "name": "pigz", "release": "7.el10", "source": "rpm", "version": "2.8" } ], "pkgconf": [ { "arch": "x86_64", "epoch": null, "name": "pkgconf", "release": "3.el10", "source": "rpm", "version": "2.1.0" } ], "pkgconf-m4": [ { "arch": "noarch", "epoch": null, "name": "pkgconf-m4", "release": "3.el10", "source": "rpm", "version": "2.1.0" } ], "pkgconf-pkg-config": [ { "arch": "x86_64", "epoch": null, "name": "pkgconf-pkg-config", "release": "3.el10", "source": "rpm", "version": "2.1.0" } ], "policycoreutils": [ { "arch": "x86_64", "epoch": null, "name": "policycoreutils", "release": "1.el10", "source": "rpm", "version": "3.10" } ], "popt": [ { "arch": "x86_64", "epoch": null, "name": "popt", "release": "8.el10", "source": "rpm", "version": "1.19" } ], "prefixdevname": [ { "arch": "x86_64", "epoch": null, "name": "prefixdevname", "release": "4.el10", "source": "rpm", "version": "0.2.0" } ], "procps-ng": [ { "arch": "x86_64", "epoch": null, "name": "procps-ng", "release": "11.el10", "source": "rpm", "version": "4.0.4" } ], "psmisc": [ { "arch": "x86_64", "epoch": null, "name": "psmisc", "release": "8.el10", "source": "rpm", "version": "23.6" } ], "publicsuffix-list-dafsa": [ { "arch": "noarch", "epoch": null, "name": "publicsuffix-list-dafsa", "release": "5.el10", "source": "rpm", "version": "20240107" } ], "python-unversioned-command": [ { "arch": "noarch", "epoch": null, "name": "python-unversioned-command", "release": "5.el10", "source": "rpm", "version": "3.12.12" } ], "python3": [ { "arch": "x86_64", "epoch": null, "name": "python3", "release": "5.el10", "source": "rpm", "version": "3.12.12" } ], "python3-attrs": [ { "arch": "noarch", "epoch": null, "name": "python3-attrs", "release": "7.el10", "source": "rpm", "version": "23.2.0" } ], "python3-audit": [ { "arch": "x86_64", "epoch": null, "name": "python3-audit", "release": "5.el10", "source": "rpm", "version": "4.0.3" } ], "python3-blivet": [ { "arch": "noarch", "epoch": 1, "name": "python3-blivet", "release": "5.el10", "source": "rpm", "version": "3.13.0" } ], "python3-blockdev": [ { "arch": "x86_64", "epoch": null, "name": "python3-blockdev", "release": "2.el10", "source": "rpm", "version": "3.4.0" } ], "python3-bytesize": [ { "arch": "x86_64", "epoch": null, "name": "python3-bytesize", "release": "5.el10", "source": "rpm", "version": "2.10" } ], "python3-charset-normalizer": [ { "arch": "noarch", "epoch": null, "name": "python3-charset-normalizer", "release": "1.el10", "source": "rpm", "version": "3.4.2" } ], "python3-configobj": [ { "arch": "noarch", "epoch": null, "name": "python3-configobj", "release": "10.el10", "source": "rpm", "version": "5.0.8" } ], "python3-configshell": [ { "arch": "noarch", "epoch": 1, "name": "python3-configshell", "release": "9.el10", "source": "rpm", "version": "1.1.30" } ], "python3-dasbus": [ { "arch": "noarch", "epoch": null, "name": "python3-dasbus", "release": "8.el10", "source": "rpm", "version": "1.7" } ], "python3-dateutil": [ { "arch": "noarch", "epoch": 1, "name": "python3-dateutil", "release": "1.el10", "source": "rpm", "version": "2.9.0.post0" } ], "python3-dbus": [ { "arch": "x86_64", "epoch": null, "name": 
"python3-dbus", "release": "8.el10", "source": "rpm", "version": "1.3.2" } ], "python3-dbus-client-gen": [ { "arch": "noarch", "epoch": null, "name": "python3-dbus-client-gen", "release": "8.el10", "source": "rpm", "version": "0.5.1" } ], "python3-dbus-python-client-gen": [ { "arch": "noarch", "epoch": null, "name": "python3-dbus-python-client-gen", "release": "7.el10", "source": "rpm", "version": "0.8.3" } ], "python3-dbus-signature-pyparsing": [ { "arch": "noarch", "epoch": null, "name": "python3-dbus-signature-pyparsing", "release": "9.el10", "source": "rpm", "version": "0.4.1" } ], "python3-distro": [ { "arch": "noarch", "epoch": null, "name": "python3-distro", "release": "5.el10", "source": "rpm", "version": "1.9.0" } ], "python3-dnf": [ { "arch": "noarch", "epoch": null, "name": "python3-dnf", "release": "21.el10", "source": "rpm", "version": "4.20.0" } ], "python3-dnf-plugins-core": [ { "arch": "noarch", "epoch": null, "name": "python3-dnf-plugins-core", "release": "10.el10", "source": "rpm", "version": "4.7.0" } ], "python3-firewall": [ { "arch": "noarch", "epoch": null, "name": "python3-firewall", "release": "1.el10", "source": "rpm", "version": "2.4.0" } ], "python3-gobject-base": [ { "arch": "x86_64", "epoch": null, "name": "python3-gobject-base", "release": "7.el10", "source": "rpm", "version": "3.46.0" } ], "python3-gobject-base-noarch": [ { "arch": "noarch", "epoch": null, "name": "python3-gobject-base-noarch", "release": "7.el10", "source": "rpm", "version": "3.46.0" } ], "python3-hawkey": [ { "arch": "x86_64", "epoch": null, "name": "python3-hawkey", "release": "14.el10", "source": "rpm", "version": "0.73.1" } ], "python3-idna": [ { "arch": "noarch", "epoch": null, "name": "python3-idna", "release": "4.el10", "source": "rpm", "version": "3.7" } ], "python3-into-dbus-python": [ { "arch": "noarch", "epoch": null, "name": "python3-into-dbus-python", "release": "7.el10", "source": "rpm", "version": "0.8.2" } ], "python3-jinja2": [ { "arch": "noarch", "epoch": null, "name": "python3-jinja2", "release": "1.el10", "source": "rpm", "version": "3.1.6" } ], "python3-jsonpatch": [ { "arch": "noarch", "epoch": null, "name": "python3-jsonpatch", "release": "6.el10", "source": "rpm", "version": "1.33" } ], "python3-jsonpointer": [ { "arch": "noarch", "epoch": null, "name": "python3-jsonpointer", "release": "9.el10", "source": "rpm", "version": "2.3" } ], "python3-jsonschema": [ { "arch": "noarch", "epoch": null, "name": "python3-jsonschema", "release": "7.el10", "source": "rpm", "version": "4.19.1" } ], "python3-jsonschema-specifications": [ { "arch": "noarch", "epoch": null, "name": "python3-jsonschema-specifications", "release": "6.el10", "source": "rpm", "version": "2023.11.2" } ], "python3-justbases": [ { "arch": "noarch", "epoch": null, "name": "python3-justbases", "release": "9.el10", "source": "rpm", "version": "0.15.2" } ], "python3-justbytes": [ { "arch": "noarch", "epoch": null, "name": "python3-justbytes", "release": "7.el10", "source": "rpm", "version": "0.15.2" } ], "python3-kmod": [ { "arch": "x86_64", "epoch": null, "name": "python3-kmod", "release": "6.el10", "source": "rpm", "version": "0.9.2" } ], "python3-libcomps": [ { "arch": "x86_64", "epoch": null, "name": "python3-libcomps", "release": "3.el10", "source": "rpm", "version": "0.1.21" } ], "python3-libdnf": [ { "arch": "x86_64", "epoch": null, "name": "python3-libdnf", "release": "14.el10", "source": "rpm", "version": "0.73.1" } ], "python3-libmount": [ { "arch": "x86_64", "epoch": null, "name": "python3-libmount", 
"release": "19.el10", "source": "rpm", "version": "2.40.2" } ], "python3-libs": [ { "arch": "x86_64", "epoch": null, "name": "python3-libs", "release": "5.el10", "source": "rpm", "version": "3.12.12" } ], "python3-libselinux": [ { "arch": "x86_64", "epoch": null, "name": "python3-libselinux", "release": "1.el10", "source": "rpm", "version": "3.10" } ], "python3-libsemanage": [ { "arch": "x86_64", "epoch": null, "name": "python3-libsemanage", "release": "1.el10", "source": "rpm", "version": "3.10" } ], "python3-lxml": [ { "arch": "x86_64", "epoch": null, "name": "python3-lxml", "release": "4.el10", "source": "rpm", "version": "5.2.1" } ], "python3-markupsafe": [ { "arch": "x86_64", "epoch": null, "name": "python3-markupsafe", "release": "6.el10", "source": "rpm", "version": "2.1.3" } ], "python3-nftables": [ { "arch": "x86_64", "epoch": 1, "name": "python3-nftables", "release": "3.el10", "source": "rpm", "version": "1.1.5" } ], "python3-oauthlib": [ { "arch": "noarch", "epoch": null, "name": "python3-oauthlib", "release": "6.el10", "source": "rpm", "version": "3.2.2" } ], "python3-packaging": [ { "arch": "noarch", "epoch": null, "name": "python3-packaging", "release": "2.el10", "source": "rpm", "version": "24.2" } ], "python3-pip-wheel": [ { "arch": "noarch", "epoch": null, "name": "python3-pip-wheel", "release": "7.el10", "source": "rpm", "version": "23.3.2" } ], "python3-policycoreutils": [ { "arch": "noarch", "epoch": null, "name": "python3-policycoreutils", "release": "1.el10", "source": "rpm", "version": "3.10" } ], "python3-psutil": [ { "arch": "x86_64", "epoch": null, "name": "python3-psutil", "release": "6.el10", "source": "rpm", "version": "5.9.8" } ], "python3-pyparsing": [ { "arch": "noarch", "epoch": null, "name": "python3-pyparsing", "release": "7.el10", "source": "rpm", "version": "3.1.1" } ], "python3-pyparted": [ { "arch": "x86_64", "epoch": 1, "name": "python3-pyparted", "release": "7.el10", "source": "rpm", "version": "3.13.0" } ], "python3-pyserial": [ { "arch": "noarch", "epoch": null, "name": "python3-pyserial", "release": "11.el10", "source": "rpm", "version": "3.5" } ], "python3-pyudev": [ { "arch": "noarch", "epoch": null, "name": "python3-pyudev", "release": "10.el10", "source": "rpm", "version": "0.24.1" } ], "python3-pyyaml": [ { "arch": "x86_64", "epoch": null, "name": "python3-pyyaml", "release": "19.el10", "source": "rpm", "version": "6.0.1" } ], "python3-referencing": [ { "arch": "noarch", "epoch": null, "name": "python3-referencing", "release": "6.el10", "source": "rpm", "version": "0.31.1" } ], "python3-requests": [ { "arch": "noarch", "epoch": null, "name": "python3-requests", "release": "1.el10", "source": "rpm", "version": "2.32.4" } ], "python3-rpds-py": [ { "arch": "x86_64", "epoch": null, "name": "python3-rpds-py", "release": "6.el10", "source": "rpm", "version": "0.17.1" } ], "python3-rpm": [ { "arch": "x86_64", "epoch": null, "name": "python3-rpm", "release": "23.el10", "source": "rpm", "version": "4.19.1.1" } ], "python3-rtslib": [ { "arch": "noarch", "epoch": null, "name": "python3-rtslib", "release": "12.el10", "source": "rpm", "version": "2.1.76" } ], "python3-setools": [ { "arch": "x86_64", "epoch": null, "name": "python3-setools", "release": "3.el10", "source": "rpm", "version": "4.6.0" } ], "python3-six": [ { "arch": "noarch", "epoch": null, "name": "python3-six", "release": "16.el10", "source": "rpm", "version": "1.16.0" } ], "python3-systemd": [ { "arch": "x86_64", "epoch": null, "name": "python3-systemd", "release": "11.el10", "source": 
"rpm", "version": "235" } ], "python3-typing-extensions": [ { "arch": "noarch", "epoch": null, "name": "python3-typing-extensions", "release": "6.el10", "source": "rpm", "version": "4.9.0" } ], "python3-urllib3": [ { "arch": "noarch", "epoch": null, "name": "python3-urllib3", "release": "3.el10", "source": "rpm", "version": "1.26.19" } ], "python3-urwid": [ { "arch": "x86_64", "epoch": null, "name": "python3-urwid", "release": "4.el10", "source": "rpm", "version": "2.5.3" } ], "python3-wcwidth": [ { "arch": "noarch", "epoch": null, "name": "python3-wcwidth", "release": "6.el10", "source": "rpm", "version": "0.2.6" } ], "qa-tools": [ { "arch": "noarch", "epoch": null, "name": "qa-tools", "release": "6.el10", "source": "rpm", "version": "4.1" } ], "qemu-guest-agent": [ { "arch": "x86_64", "epoch": 18, "name": "qemu-guest-agent", "release": "13.el10", "source": "rpm", "version": "10.1.0" } ], "quota": [ { "arch": "x86_64", "epoch": 1, "name": "quota", "release": "10.el10", "source": "rpm", "version": "4.09" } ], "quota-nls": [ { "arch": "noarch", "epoch": 1, "name": "quota-nls", "release": "10.el10", "source": "rpm", "version": "4.09" } ], "readline": [ { "arch": "x86_64", "epoch": null, "name": "readline", "release": "11.el10", "source": "rpm", "version": "8.2" } ], "realtek-firmware": [ { "arch": "noarch", "epoch": null, "name": "realtek-firmware", "release": "22.el10", "source": "rpm", "version": "20260130" } ], "redhat-mono-vf-fonts": [ { "arch": "noarch", "epoch": null, "name": "redhat-mono-vf-fonts", "release": "1.el10", "source": "rpm", "version": "4.1.0" } ], "redhat-text-vf-fonts": [ { "arch": "noarch", "epoch": null, "name": "redhat-text-vf-fonts", "release": "1.el10", "source": "rpm", "version": "4.1.0" } ], "restraint": [ { "arch": "x86_64", "epoch": null, "name": "restraint", "release": "1.el10", "source": "rpm", "version": "0.4.14" } ], "restraint-rhts": [ { "arch": "x86_64", "epoch": null, "name": "restraint-rhts", "release": "1.el10", "source": "rpm", "version": "0.4.14" } ], "rng-tools": [ { "arch": "x86_64", "epoch": null, "name": "rng-tools", "release": "5.el10", "source": "rpm", "version": "6.17" } ], "rootfiles": [ { "arch": "noarch", "epoch": null, "name": "rootfiles", "release": "54.el10", "source": "rpm", "version": "8.1" } ], "rpcbind": [ { "arch": "x86_64", "epoch": null, "name": "rpcbind", "release": "3.el10", "source": "rpm", "version": "1.2.7" } ], "rpm": [ { "arch": "x86_64", "epoch": null, "name": "rpm", "release": "23.el10", "source": "rpm", "version": "4.19.1.1" } ], "rpm-build-libs": [ { "arch": "x86_64", "epoch": null, "name": "rpm-build-libs", "release": "23.el10", "source": "rpm", "version": "4.19.1.1" } ], "rpm-libs": [ { "arch": "x86_64", "epoch": null, "name": "rpm-libs", "release": "23.el10", "source": "rpm", "version": "4.19.1.1" } ], "rpm-plugin-audit": [ { "arch": "x86_64", "epoch": null, "name": "rpm-plugin-audit", "release": "23.el10", "source": "rpm", "version": "4.19.1.1" } ], "rpm-plugin-selinux": [ { "arch": "x86_64", "epoch": null, "name": "rpm-plugin-selinux", "release": "23.el10", "source": "rpm", "version": "4.19.1.1" } ], "rpm-plugin-systemd-inhibit": [ { "arch": "x86_64", "epoch": null, "name": "rpm-plugin-systemd-inhibit", "release": "23.el10", "source": "rpm", "version": "4.19.1.1" } ], "rpm-sequoia": [ { "arch": "x86_64", "epoch": null, "name": "rpm-sequoia", "release": "2.el10", "source": "rpm", "version": "1.10.1.1" } ], "rpm-sign-libs": [ { "arch": "x86_64", "epoch": null, "name": "rpm-sign-libs", "release": "23.el10", "source": 
"rpm", "version": "4.19.1.1" } ], "rsync": [ { "arch": "x86_64", "epoch": null, "name": "rsync", "release": "3.el10", "source": "rpm", "version": "3.4.1" } ], "rsyslog": [ { "arch": "x86_64", "epoch": null, "name": "rsyslog", "release": "5.el10", "source": "rpm", "version": "8.2510.0" } ], "sed": [ { "arch": "x86_64", "epoch": null, "name": "sed", "release": "5.el10", "source": "rpm", "version": "4.9" } ], "selinux-policy": [ { "arch": "noarch", "epoch": null, "name": "selinux-policy", "release": "2.el10", "source": "rpm", "version": "42.1.18" } ], "selinux-policy-targeted": [ { "arch": "noarch", "epoch": null, "name": "selinux-policy-targeted", "release": "2.el10", "source": "rpm", "version": "42.1.18" } ], "sequoia-sq": [ { "arch": "x86_64", "epoch": null, "name": "sequoia-sq", "release": "1.el10", "source": "rpm", "version": "1.3.1.1" } ], "setup": [ { "arch": "noarch", "epoch": null, "name": "setup", "release": "7.el10", "source": "rpm", "version": "2.14.5" } ], "sg3_utils": [ { "arch": "x86_64", "epoch": null, "name": "sg3_utils", "release": "7.el10", "source": "rpm", "version": "1.48" } ], "sg3_utils-libs": [ { "arch": "x86_64", "epoch": null, "name": "sg3_utils-libs", "release": "7.el10", "source": "rpm", "version": "1.48" } ], "shadow-utils": [ { "arch": "x86_64", "epoch": 2, "name": "shadow-utils", "release": "11.el10", "source": "rpm", "version": "4.15.0" } ], "slang": [ { "arch": "x86_64", "epoch": null, "name": "slang", "release": "8.el10", "source": "rpm", "version": "2.3.3" } ], "snappy": [ { "arch": "x86_64", "epoch": null, "name": "snappy", "release": "7.el10", "source": "rpm", "version": "1.1.10" } ], "sqlite-libs": [ { "arch": "x86_64", "epoch": null, "name": "sqlite-libs", "release": "5.el10", "source": "rpm", "version": "3.46.1" } ], "sssd-client": [ { "arch": "x86_64", "epoch": null, "name": "sssd-client", "release": "1.el10", "source": "rpm", "version": "2.12.0" } ], "sssd-common": [ { "arch": "x86_64", "epoch": null, "name": "sssd-common", "release": "1.el10", "source": "rpm", "version": "2.12.0" } ], "sssd-kcm": [ { "arch": "x86_64", "epoch": null, "name": "sssd-kcm", "release": "1.el10", "source": "rpm", "version": "2.12.0" } ], "sssd-krb5-common": [ { "arch": "x86_64", "epoch": null, "name": "sssd-krb5-common", "release": "1.el10", "source": "rpm", "version": "2.12.0" } ], "sssd-nfs-idmap": [ { "arch": "x86_64", "epoch": null, "name": "sssd-nfs-idmap", "release": "1.el10", "source": "rpm", "version": "2.12.0" } ], "strace": [ { "arch": "x86_64", "epoch": null, "name": "strace", "release": "3.el10", "source": "rpm", "version": "6.12" } ], "stratis-cli": [ { "arch": "noarch", "epoch": null, "name": "stratis-cli", "release": "2.el10", "source": "rpm", "version": "3.8.3" } ], "stratisd": [ { "arch": "x86_64", "epoch": null, "name": "stratisd", "release": "2.el10", "source": "rpm", "version": "3.8.6" } ], "sudo": [ { "arch": "x86_64", "epoch": null, "name": "sudo", "release": "2.p2.el10", "source": "rpm", "version": "1.9.17" } ], "sudo-python-plugin": [ { "arch": "x86_64", "epoch": null, "name": "sudo-python-plugin", "release": "2.p2.el10", "source": "rpm", "version": "1.9.17" } ], "systemd": [ { "arch": "x86_64", "epoch": null, "name": "systemd", "release": "23.el10", "source": "rpm", "version": "257" } ], "systemd-libs": [ { "arch": "x86_64", "epoch": null, "name": "systemd-libs", "release": "23.el10", "source": "rpm", "version": "257" } ], "systemd-pam": [ { "arch": "x86_64", "epoch": null, "name": "systemd-pam", "release": "23.el10", "source": "rpm", "version": 
"257" } ], "systemd-udev": [ { "arch": "x86_64", "epoch": null, "name": "systemd-udev", "release": "23.el10", "source": "rpm", "version": "257" } ], "systemtap": [ { "arch": "x86_64", "epoch": null, "name": "systemtap", "release": "3.el10", "source": "rpm", "version": "5.4" } ], "systemtap-client": [ { "arch": "x86_64", "epoch": null, "name": "systemtap-client", "release": "3.el10", "source": "rpm", "version": "5.4" } ], "systemtap-devel": [ { "arch": "x86_64", "epoch": null, "name": "systemtap-devel", "release": "3.el10", "source": "rpm", "version": "5.4" } ], "systemtap-runtime": [ { "arch": "x86_64", "epoch": null, "name": "systemtap-runtime", "release": "3.el10", "source": "rpm", "version": "5.4" } ], "tar": [ { "arch": "x86_64", "epoch": 2, "name": "tar", "release": "11.el10", "source": "rpm", "version": "1.35" } ], "target-restore": [ { "arch": "noarch", "epoch": null, "name": "target-restore", "release": "12.el10", "source": "rpm", "version": "2.1.76" } ], "targetcli": [ { "arch": "noarch", "epoch": null, "name": "targetcli", "release": "5.el10", "source": "rpm", "version": "2.1.58" } ], "tbb": [ { "arch": "x86_64", "epoch": null, "name": "tbb", "release": "7.el10", "source": "rpm", "version": "2021.11.0" } ], "time": [ { "arch": "x86_64", "epoch": null, "name": "time", "release": "25.el10", "source": "rpm", "version": "1.9" } ], "tiwilink-firmware": [ { "arch": "noarch", "epoch": null, "name": "tiwilink-firmware", "release": "22.el10", "source": "rpm", "version": "20260130" } ], "tpm2-tools": [ { "arch": "x86_64", "epoch": null, "name": "tpm2-tools", "release": "5.el10", "source": "rpm", "version": "5.7" } ], "tpm2-tss": [ { "arch": "x86_64", "epoch": null, "name": "tpm2-tss", "release": "6.el10", "source": "rpm", "version": "4.1.3" } ], "tpm2-tss-fapi": [ { "arch": "x86_64", "epoch": null, "name": "tpm2-tss-fapi", "release": "6.el10", "source": "rpm", "version": "4.1.3" } ], "tzdata": [ { "arch": "noarch", "epoch": null, "name": "tzdata", "release": "1.el10", "source": "rpm", "version": "2026a" } ], "unzip": [ { "arch": "x86_64", "epoch": null, "name": "unzip", "release": "69.el10", "source": "rpm", "version": "6.0" } ], "userspace-rcu": [ { "arch": "x86_64", "epoch": null, "name": "userspace-rcu", "release": "7.el10", "source": "rpm", "version": "0.14.0" } ], "util-linux": [ { "arch": "x86_64", "epoch": null, "name": "util-linux", "release": "19.el10", "source": "rpm", "version": "2.40.2" } ], "util-linux-core": [ { "arch": "x86_64", "epoch": null, "name": "util-linux-core", "release": "19.el10", "source": "rpm", "version": "2.40.2" } ], "vdo": [ { "arch": "x86_64", "epoch": null, "name": "vdo", "release": "1.el10", "source": "rpm", "version": "8.3.2.1" } ], "vim-common": [ { "arch": "x86_64", "epoch": 2, "name": "vim-common", "release": "9.el10", "source": "rpm", "version": "9.1.083" } ], "vim-data": [ { "arch": "noarch", "epoch": 2, "name": "vim-data", "release": "9.el10", "source": "rpm", "version": "9.1.083" } ], "vim-enhanced": [ { "arch": "x86_64", "epoch": 2, "name": "vim-enhanced", "release": "9.el10", "source": "rpm", "version": "9.1.083" } ], "vim-filesystem": [ { "arch": "noarch", "epoch": 2, "name": "vim-filesystem", "release": "9.el10", "source": "rpm", "version": "9.1.083" } ], "vim-minimal": [ { "arch": "x86_64", "epoch": 2, "name": "vim-minimal", "release": "9.el10", "source": "rpm", "version": "9.1.083" } ], "volume_key-libs": [ { "arch": "x86_64", "epoch": null, "name": "volume_key-libs", "release": "26.el10", "source": "rpm", "version": "0.3.12" } ], "wget": [ 
{ "arch": "x86_64", "epoch": null, "name": "wget", "release": "5.el10", "source": "rpm", "version": "1.24.5" } ], "which": [ { "arch": "x86_64", "epoch": null, "name": "which", "release": "44.el10", "source": "rpm", "version": "2.21" } ], "xfsprogs": [ { "arch": "x86_64", "epoch": null, "name": "xfsprogs", "release": "1.el10", "source": "rpm", "version": "6.16.0" } ], "xkeyboard-config": [ { "arch": "noarch", "epoch": null, "name": "xkeyboard-config", "release": "3.el10", "source": "rpm", "version": "2.41" } ], "xxd": [ { "arch": "x86_64", "epoch": 2, "name": "xxd", "release": "9.el10", "source": "rpm", "version": "9.1.083" } ], "xz": [ { "arch": "x86_64", "epoch": 1, "name": "xz", "release": "4.el10", "source": "rpm", "version": "5.6.2" } ], "xz-devel": [ { "arch": "x86_64", "epoch": 1, "name": "xz-devel", "release": "4.el10", "source": "rpm", "version": "5.6.2" } ], "xz-libs": [ { "arch": "x86_64", "epoch": 1, "name": "xz-libs", "release": "4.el10", "source": "rpm", "version": "5.6.2" } ], "yum": [ { "arch": "noarch", "epoch": null, "name": "yum", "release": "21.el10", "source": "rpm", "version": "4.20.0" } ], "yum-utils": [ { "arch": "noarch", "epoch": null, "name": "yum-utils", "release": "10.el10", "source": "rpm", "version": "4.7.0" } ], "zip": [ { "arch": "x86_64", "epoch": null, "name": "zip", "release": "45.el10", "source": "rpm", "version": "3.0" } ], "zlib-ng-compat": [ { "arch": "x86_64", "epoch": null, "name": "zlib-ng-compat", "release": "3.el10", "source": "rpm", "version": "2.2.3" } ], "zlib-ng-compat-devel": [ { "arch": "x86_64", "epoch": null, "name": "zlib-ng-compat-devel", "release": "3.el10", "source": "rpm", "version": "2.2.3" } ] } }, "changed": false } TASK [Set blivet package name] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:26 Saturday 28 March 2026 19:23:07 -0400 (0:00:06.919) 0:00:42.689 ******** ok: [managed-node12] => { "ansible_facts": { "blivet_pkg_name": [ "python3-blivet" ] }, "changed": false } TASK [Set blivet package version] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:30 Saturday 28 March 2026 19:23:08 -0400 (0:00:00.496) 0:00:43.185 ******** ok: [managed-node12] => { "ansible_facts": { "blivet_pkg_version": "3.13.0-5.el10" }, "changed": false } TASK [Set distribution version] ************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:34 Saturday 28 March 2026 19:23:08 -0400 (0:00:00.452) 0:00:43.648 ******** ok: [managed-node12] => { "ansible_facts": { "is_fedora": false, "is_rhel10": true, "is_rhel78": false, "is_rhel9": false }, "changed": false } TASK [Get unused disks] ******************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:44 Saturday 28 March 2026 19:23:09 -0400 (0:00:00.625) 0:00:44.273 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml for managed-node12 TASK [Ensure test packages] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:2 Saturday 28 March 2026 19:23:09 -0400 (0:00:00.593) 0:00:44.867 ******** ok: [managed-node12] => { 
"changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: util-linux-core TASK [Find unused disks in the system] ***************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:11 Saturday 28 March 2026 19:23:11 -0400 (0:00:01.682) 0:00:46.549 ******** ok: [managed-node12] => { "changed": false, "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "info": [ "Line: NAME=\"/dev/sda\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sdb\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sdc\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sdd\" TYPE=\"disk\" SIZE=\"1099511627776\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sde\" TYPE=\"disk\" SIZE=\"1099511627776\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sdf\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sdg\" TYPE=\"disk\" SIZE=\"1099511627776\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sdh\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/sdi\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/xvda\" TYPE=\"disk\" SIZE=\"268435456000\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/xvda1\" TYPE=\"part\" SIZE=\"1048576\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line type [part] is not disk: NAME=\"/dev/xvda1\" TYPE=\"part\" SIZE=\"1048576\" FSTYPE=\"\" LOG-SEC=\"512\"", "Line: NAME=\"/dev/xvda2\" TYPE=\"part\" SIZE=\"268433341952\" FSTYPE=\"xfs\" LOG-SEC=\"512\"", "Line type [part] is not disk: NAME=\"/dev/xvda2\" TYPE=\"part\" SIZE=\"268433341952\" FSTYPE=\"xfs\" LOG-SEC=\"512\"", "filename [xvda2] is a partition", "filename [xvda1] is a partition", "Disk [/dev/xvda] attrs [{'type': 'disk', 'size': '268435456000', 'fstype': '', 'ssize': '512'}] has partitions" ] } TASK [Debug why there are no unused disks] ************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:20 Saturday 28 March 2026 19:23:14 -0400 (0:00:02.684) 0:00:49.239 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "'Unable to find unused disk' in unused_disks_return.disks", "skip_reason": "Conditional result was False" } TASK [Set unused_disks if necessary] ******************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:29 Saturday 28 March 2026 19:23:14 -0400 (0:00:00.309) 0:00:49.548 ******** ok: [managed-node12] => { "ansible_facts": { "unused_disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ] }, "changed": false } TASK [Exit playbook when there's not enough unused disks in the system] ******** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:34 Saturday 28 March 2026 19:23:14 -0400 (0:00:00.356) 0:00:49.905 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "unused_disks | d([]) | length < disks_needed | d(1)", "skip_reason": "Conditional result was False" } TASK [Print unused disks] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/get_unused_disk.yml:39 Saturday 28 March 2026 19:23:15 -0400 (0:00:00.415) 
0:00:50.321 ******** ok: [managed-node12] => { "unused_disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ] } TASK [Start stratisd service] ************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:52 Saturday 28 March 2026 19:23:16 -0400 (0:00:00.799) 0:00:51.121 ******** changed: [managed-node12] => { "changed": true, "name": "stratisd", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "systemd-journald.socket dbus.socket system.slice multi-user.target", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.storage.stratis3", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "no", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "Stratis daemon", "DevicePolicy": "auto", "Documentation": "\"man:stratisd(8)\"", "DynamicUser": "no", "EffectiveMemoryHigh": "3630931968", "EffectiveMemoryMax": "3630931968", "EffectiveTasksMax": "21802", "Environment": "RUST_BACKTRACE=1", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/libexec/stratisd ; argv[]=/usr/libexec/stratisd --log-level debug ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/libexec/stratisd ; argv[]=/usr/libexec/stratisd --log-level debug ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", 
"FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/stratisd.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "stratisd.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "process", "KillSignal": "2", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13626", "LimitNPROCSoft": "13626", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13626", "LimitSIGPENDINGSoft": "13626", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3157286912", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "stratisd.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", 
"ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice dbus.socket", "Restart": "on-abort", "RestartKillSignal": "2", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2026-03-28 18:43:00 EDT", "StateChangeTimestampMonotonic": "486843340", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "21802", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [Create one Stratis pool with one volume] ********************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:60 Saturday 28 March 2026 19:23:20 -0400 (0:00:04.866) 0:00:55.987 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml for managed-node12 TASK [Clear facts] ************************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml:10 Saturday 28 March 2026 19:23:21 -0400 (0:00:00.475) 0:00:56.462 ******** META: facts cleared TASK [Run the role] 
************************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml:24 Saturday 28 March 2026 19:23:21 -0400 (0:00:00.104) 0:00:56.566 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "__sr_failed_when is defined", "skip_reason": "Conditional result was False" } TASK [Run the role normally] *************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml:34 Saturday 28 March 2026 19:23:21 -0400 (0:00:00.177) 0:00:56.744 ******** included: fedora.linux_system_roles.storage for managed-node12 TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Saturday 28 March 2026 19:23:21 -0400 (0:00:00.353) 0:00:57.098 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node12 TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Saturday 28 March 2026 19:23:22 -0400 (0:00:00.277) 0:00:57.375 ******** ok: [managed-node12] TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Saturday 28 March 2026 19:23:24 -0400 (0:00:02.113) 0:00:59.489 ******** skipping: [managed-node12] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node12] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node12] => (item=CentOS_10.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-fs", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "xfsprogs", "stratisd", "stratis-cli", "{{ 'libblockdev-s390' if ansible_facts['architecture'] == 's390x' else 'libblockdev' }}", "vdo" ] }, "ansible_included_var_files": [ "/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node12] => (item=CentOS_10.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-fs", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "xfsprogs", "stratisd", "stratis-cli", "{{ 'libblockdev-s390' if ansible_facts['architecture'] == 's390x' else 'libblockdev' }}", "vdo" ] }, "ansible_included_var_files": [ "/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Saturday 28 March 2026 19:23:24 -0400 (0:00:00.579) 
0:01:00.069 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "not __storage_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Saturday 28 March 2026 19:23:25 -0400 (0:00:00.279) 0:01:00.348 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "not __storage_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Saturday 28 March 2026 19:23:25 -0400 (0:00:00.281) 0:01:00.629 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Saturday 28 March 2026 19:23:25 -0400 (0:00:00.266) 0:01:00.895 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Saturday 28 March 2026 19:23:26 -0400 (0:00:00.255) 0:01:01.151 ******** redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node12 TASK [fedora.linux_system_roles.storage : Add repo key] ************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Saturday 28 March 2026 19:23:26 -0400 (0:00:00.631) 0:01:01.782 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_blivet_custom_repo.key is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Add blivet repo] ********************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:15 Saturday 28 March 2026 19:23:26 -0400 (0:00:00.277) 0:01:02.059 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_blivet_custom_repo.key is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:20 Saturday 28 March 2026 19:23:27 -0400 (0:00:00.221) 0:01:02.281 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:27 Saturday 28 March 2026 19:23:27 -0400 (0:00:00.184) 0:01:02.466 ******** ok: [managed-node12] => { "storage_pools | d([])": [ 
{ "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "name": "foo", "type": "stratis", "volumes": [ { "mount_point": "/opt/test1", "name": "test1", "size": "4g" } ] } ] } TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:32 Saturday 28 March 2026 19:23:27 -0400 (0:00:00.259) 0:01:02.725 ******** ok: [managed-node12] => { "storage_volumes | d([])": [] } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37 Saturday 28 March 2026 19:23:27 -0400 (0:00:00.219) 0:01:02.945 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:50 Saturday 28 March 2026 19:23:28 -0400 (0:00:00.220) 0:01:03.165 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:56 Saturday 28 March 2026 19:23:28 -0400 (0:00:00.232) 0:01:03.398 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:76 Saturday 28 March 2026 19:23:28 -0400 (0:00:00.257) 0:01:03.655 ******** ok: [managed-node12] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "apt-daily.service": { "name": "apt-daily.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, 
"blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": 
"systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fcoe.service": { "name": "fcoe.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "iscsi-shutdown.service": { "name": "iscsi-shutdown.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsi.service": { "name": "iscsi.service", "source": "systemd", "state": 
"stopped", "status": "not-found" }, "iscsid.service": { "name": "iscsid.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-activation-early.service": { "name": "lvm2-activation-early.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_multipath.service": { "name": "modprobe@dm_multipath.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", 
"status": "inactive" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "raid-check.service": { "name": "raid-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "rbdmap.service": { "name": "rbdmap.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "stopped", 
"status": "enabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-unix-local@.service": { "name": "sshd-unix-local@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd-vsock@.service": { "name": "sshd-vsock@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "stratis-fstab-setup-with-network@.service": { "name": "stratis-fstab-setup-with-network@.service", "source": "systemd", "state": "unknown", "status": "static" }, "stratis-fstab-setup@.service": { "name": "stratis-fstab-setup@.service", "source": "systemd", "state": "unknown", "status": "static" }, 
"stratisd-min-postinitrd.service": { "name": "stratisd-min-postinitrd.service", "source": "systemd", "state": "inactive", "status": "static" }, "stratisd.service": { "name": "stratisd.service", "source": "systemd", "state": "running", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": 
"systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, 
"systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, 
"systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-userdbd.service": { "name": "systemd-userdbd.service", "source": "systemd", "state": "running", "status": "indirect" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:82 Saturday 28 March 2026 19:23:31 -0400 (0:00:03.123) 0:01:06.778 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:88 Saturday 28 March 2026 19:23:32 -0400 (0:00:00.429) 0:01:07.207 ******** changed: [managed-node12] => { "actions": [ { "action": "create format", "device": "/dev/sdi", "fs_type": "stratis" }, { "action": "create format", "device": "/dev/sdh", "fs_type": "stratis" }, { "action": "create format", "device": "/dev/sdg", "fs_type": "stratis" }, { "action": "create format", "device": "/dev/sdf", "fs_type": "stratis" }, { "action": "create format", "device": "/dev/sde", "fs_type": "stratis" }, { "action": "create format", "device": "/dev/sdd", "fs_type": "stratis" }, { "action": "create format", "device": "/dev/sdc", "fs_type": "stratis" }, { "action": "create format", "device": "/dev/sdb", "fs_type": "stratis" }, { "action": "create format", "device": "/dev/sda", "fs_type": "stratis" }, { "action": "create device", "device": "/dev/stratis/foo", "fs_type": null }, { "action": "create device", "device": "/dev/stratis/foo/test1", "fs_type": null }, { "action": "create format", "device": "/dev/stratis/foo/test1", "fs_type": "stratis xfs" } ], "changed": true, "crypts": [], "leaves": [ "/dev/xvda1", "/dev/xvda2", "/dev/stratis/foo/test1" ], "mounts": [ { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "UUID=38081a08-cf75-4f9b-b424-e901b7e9592f", "state": "mounted" } ], "packages": [ "stratis-cli", "stratisd", "xfsprogs" ], "pools": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "stratis", "volumes": [ { "_device": "/dev/stratis/foo/test1", "_kernel_device": "/dev/dm-6", "_mount_id": "UUID=38081a08-cf75-4f9b-b424-e901b7e9592f", "_raw_device": "/dev/stratis/foo/test1", "_raw_kernel_device": "/dev/dm-6", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": 
null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:103 Saturday 28 March 2026 19:23:52 -0400 (0:00:20.268) 0:01:27.476 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_udevadm_trigger | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ****** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:110 Saturday 28 March 2026 19:23:52 -0400 (0:00:00.262) 0:01:27.738 ******** ok: [managed-node12] => { "changed": false, "stat": { "atime": 1774740174.7914975, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "6aeccfbb3223a634b983c3c21792c1ba90809bb8", "ctime": 1774740128.421465, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 230686985, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1774740128.421465, "nlink": 1, "path": "/etc/fstab", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1408, "uid": 0, "version": "1679064280", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:115 Saturday 28 March 2026 19:23:53 -0400 (0:00:01.050) 0:01:28.789 ******** ok: [managed-node12] => { "backup": "", "changed": false } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:133 Saturday 28 March 2026 19:23:56 -0400 (0:00:02.372) 0:01:31.161 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:139 Saturday 28 March 2026 19:23:56 -0400 (0:00:00.414) 0:01:31.575 ******** ok: [managed-node12] => { "blivet_output": { "actions": [ { "action": "create format", "device": "/dev/sdi", "fs_type": "stratis" }, { "action": "create format", "device": "/dev/sdh", "fs_type": "stratis" }, { "action": "create format", "device": "/dev/sdg", "fs_type": "stratis" }, { "action": "create format", "device": "/dev/sdf", "fs_type": "stratis" }, { "action": "create format", "device": "/dev/sde", "fs_type": "stratis" }, { "action": "create format", "device": "/dev/sdd", "fs_type": "stratis" }, { "action": "create format", "device": "/dev/sdc", "fs_type": "stratis" }, { "action": "create format", "device": "/dev/sdb", "fs_type": "stratis" }, { "action": "create format", "device": "/dev/sda", "fs_type": "stratis" }, { "action": "create device", "device": "/dev/stratis/foo", "fs_type": null }, { "action": "create device", "device": "/dev/stratis/foo/test1", "fs_type": null }, { 
"action": "create format", "device": "/dev/stratis/foo/test1", "fs_type": "stratis xfs" } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/xvda1", "/dev/xvda2", "/dev/stratis/foo/test1" ], "mounts": [ { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "UUID=38081a08-cf75-4f9b-b424-e901b7e9592f", "state": "mounted" } ], "packages": [ "stratis-cli", "stratisd", "xfsprogs" ], "pools": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "stratis", "volumes": [ { "_device": "/dev/stratis/foo/test1", "_kernel_device": "/dev/dm-6", "_mount_id": "UUID=38081a08-cf75-4f9b-b424-e901b7e9592f", "_raw_device": "/dev/stratis/foo/test1", "_raw_kernel_device": "/dev/dm-6", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148 Saturday 28 March 2026 19:23:56 -0400 (0:00:00.329) 0:01:31.905 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "stratis", "volumes": [ { "_device": "/dev/stratis/foo/test1", "_kernel_device": "/dev/dm-6", "_mount_id": "UUID=38081a08-cf75-4f9b-b424-e901b7e9592f", "_raw_device": "/dev/stratis/foo/test1", "_raw_kernel_device": "/dev/dm-6", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, 
"encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:152 Saturday 28 March 2026 19:23:57 -0400 (0:00:00.302) 0:01:32.207 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] ************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:168 Saturday 28 March 2026 19:23:57 -0400 (0:00:00.272) 0:01:32.480 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:179 Saturday 28 March 2026 19:23:57 -0400 (0:00:00.289) 0:01:32.769 ******** ok: [managed-node12] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Set up new/current mounts] *********** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:184 Saturday 28 March 2026 19:23:59 -0400 (0:00:01.523) 0:01:34.292 ******** redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount changed: [managed-node12] => (item={'src': 'UUID=38081a08-cf75-4f9b-b424-e901b7e9592f', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => { "ansible_loop_var": "mount_info", "backup_file": "", "boot": "yes", "changed": true, "dump": "0", "fstab": "/etc/fstab", "fstype": "xfs", "mount_info": { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "UUID=38081a08-cf75-4f9b-b424-e901b7e9592f", "state": "mounted" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "UUID=38081a08-cf75-4f9b-b424-e901b7e9592f" } TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195 Saturday 28 March 2026 19:24:01 -0400 (0:00:02.555) 0:01:36.848 ******** skipping: [managed-node12] => (item={'src': 'UUID=38081a08-cf75-4f9b-b424-e901b7e9592f', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => { "ansible_loop_var": "mount_info", "changed": false, 
"false_condition": "mount_info['owner'] != none or mount_info['group'] != none or mount_info['mode'] != none", "mount_info": { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "UUID=38081a08-cf75-4f9b-b424-e901b7e9592f", "state": "mounted" }, "skip_reason": "Conditional result was False" } skipping: [managed-node12] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:207 Saturday 28 March 2026 19:24:02 -0400 (0:00:00.345) 0:01:37.194 ******** ok: [managed-node12] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:215 Saturday 28 March 2026 19:24:03 -0400 (0:00:01.491) 0:01:38.685 ******** ok: [managed-node12] => { "changed": false, "stat": { "atime": 1774737762.6954868, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1774342704.475, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 4194435, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1774342323.629, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "1224473831", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:220 Saturday 28 March 2026 19:24:04 -0400 (0:00:00.991) 0:01:39.677 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:242 Saturday 28 March 2026 19:24:04 -0400 (0:00:00.182) 0:01:39.860 ******** ok: [managed-node12] TASK [Verify role results] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:72 Saturday 28 March 2026 19:24:06 -0400 (0:00:01.984) 0:01:41.845 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node12 TASK [Print out pool information] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2 Saturday 28 March 2026 19:24:07 -0400 (0:00:00.390) 0:01:42.236 ******** ok: [managed-node12] => { "_storage_pools_list": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, 
"encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "stratis", "volumes": [ { "_device": "/dev/stratis/foo/test1", "_kernel_device": "/dev/dm-6", "_mount_id": "UUID=38081a08-cf75-4f9b-b424-e901b7e9592f", "_raw_device": "/dev/stratis/foo/test1", "_raw_kernel_device": "/dev/dm-6", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } ] } ] } TASK [Print out volume information] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7 Saturday 28 March 2026 19:24:07 -0400 (0:00:00.344) 0:01:42.580 ******** skipping: [managed-node12] => { "false_condition": "_storage_volumes_list | length > 0" } TASK [Collect info about the volumes.] 
***************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15 Saturday 28 March 2026 19:24:07 -0400 (0:00:00.278) 0:01:42.859 ******** ok: [managed-node12] => { "changed": false, "info": { "/dev/loop0": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/loop0", "size": "0B", "type": "loop", "uuid": "" }, "/dev/mapper/stratis-1-private-98c86bd3ff4a4ace81fea7a7d8071f6f-flex-mdv": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/mapper/stratis-1-private-98c86bd3ff4a4ace81fea7a7d8071f6f-flex-mdv", "size": "512M", "type": "stratis", "uuid": "" }, "/dev/mapper/stratis-1-private-98c86bd3ff4a4ace81fea7a7d8071f6f-flex-thindata": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/mapper/stratis-1-private-98c86bd3ff4a4ace81fea7a7d8071f6f-flex-thindata", "size": "50G", "type": "stratis", "uuid": "" }, "/dev/mapper/stratis-1-private-98c86bd3ff4a4ace81fea7a7d8071f6f-flex-thinmeta": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/mapper/stratis-1-private-98c86bd3ff4a4ace81fea7a7d8071f6f-flex-thinmeta", "size": "786M", "type": "stratis", "uuid": "" }, "/dev/mapper/stratis-1-private-98c86bd3ff4a4ace81fea7a7d8071f6f-physical-cache": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/mapper/stratis-1-private-98c86bd3ff4a4ace81fea7a7d8071f6f-physical-cache", "size": "52.1G", "type": "stratis", "uuid": "" }, "/dev/mapper/stratis-1-private-98c86bd3ff4a4ace81fea7a7d8071f6f-physical-originsub": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/mapper/stratis-1-private-98c86bd3ff4a4ace81fea7a7d8071f6f-physical-originsub", "size": "52.1G", "type": "stratis", "uuid": "" }, "/dev/mapper/stratis-1-private-98c86bd3ff4a4ace81fea7a7d8071f6f-thinpool-pool": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/mapper/stratis-1-private-98c86bd3ff4a4ace81fea7a7d8071f6f-thinpool-pool", "size": "50G", "type": "stratis", "uuid": "" }, "/dev/sda": { "fstype": "stratis", "label": "", "mountpoint": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "26de8ef7-1afd-484c-8227-390830b9f791" }, "/dev/sdb": { "fstype": "stratis", "label": "", "mountpoint": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "c1b3a6f2-c13b-473a-8a6e-3ef837ed2f6e" }, "/dev/sdc": { "fstype": "stratis", "label": "", "mountpoint": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "41fb917c-951e-41b2-8c83-03a53bb3c710" }, "/dev/sdd": { "fstype": "stratis", "label": "", "mountpoint": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "926c9bd5-a194-4984-9b26-e2a194676c27" }, "/dev/sde": { "fstype": "stratis", "label": "", "mountpoint": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "755bef06-2819-4fe2-8faf-f6eee9e80010" }, "/dev/sdf": { "fstype": "stratis", "label": "", "mountpoint": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "9489957b-dcba-4642-bfad-4733daeb8844" }, "/dev/sdg": { "fstype": "stratis", "label": "", "mountpoint": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "9795169f-078b-4914-a09d-19e69dc3300e" }, "/dev/sdh": { "fstype": "stratis", "label": "", "mountpoint": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "b094d332-a66e-4a40-ab09-7ae241f2aeff" }, "/dev/sdi": { "fstype": "stratis", "label": "", "mountpoint": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "46644e71-1703-4adf-beee-bda271fc3b06" }, "/dev/stratis/foo/test1": { "fstype": "xfs", "label": 
"", "mountpoint": "/opt/test1", "name": "/dev/stratis/foo/test1", "size": "4G", "type": "stratis", "uuid": "38081a08-cf75-4f9b-b424-e901b7e9592f" }, "/dev/xvda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda1", "size": "1M", "type": "partition", "uuid": "" }, "/dev/xvda2": { "fstype": "xfs", "label": "", "mountpoint": "/", "name": "/dev/xvda2", "size": "250G", "type": "partition", "uuid": "94fc577e-f1df-44bb-8e86-63b9a68f8f7f" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20 Saturday 28 March 2026 19:24:10 -0400 (0:00:02.263) 0:01:45.123 ******** ok: [managed-node12] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.003026", "end": "2026-03-28 19:24:11.797469", "rc": 0, "start": "2026-03-28 19:24:11.794443" } STDOUT: # system_role:storage # # /etc/fstab # Created by anaconda on Tue Mar 24 08:52:03 2026 # # Accessible filesystems, by reference, are maintained under '/dev/disk/'. # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info. # # After editing this file, run 'systemctl daemon-reload' to update systemd # units generated from this file. # UUID=94fc577e-f1df-44bb-8e86-63b9a68f8f7f / xfs defaults 0 0 ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 UUID=38081a08-cf75-4f9b-b424-e901b7e9592f /opt/test1 xfs defaults 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25 Saturday 28 March 2026 19:24:12 -0400 (0:00:01.992) 0:01:47.116 ******** ok: [managed-node12] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.002930", "end": "2026-03-28 19:24:13.016534", "failed_when_result": false, "rc": 0, "start": "2026-03-28 19:24:13.013604" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34 Saturday 28 March 2026 19:24:13 -0400 (0:00:01.206) 0:01:48.322 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node12 => (item={'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 
'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'present', 'type': 'stratis', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'part_type': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=38081a08-cf75-4f9b-b424-e901b7e9592f', '_kernel_device': '/dev/dm-6', '_raw_kernel_device': '/dev/dm-6'}]}) TASK [Set _storage_pool_tests] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5 Saturday 28 March 2026 19:24:13 -0400 (0:00:00.476) 0:01:48.799 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [Get VG shared value status] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18 Saturday 28 March 2026 19:24:13 -0400 (0:00:00.236) 0:01:49.035 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'", "skip_reason": "Conditional result was False" } TASK [Verify that VG shared value checks out] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24 Saturday 28 March 2026 19:24:14 -0400 (0:00:00.219) 0:01:49.255 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'", "skip_reason": "Conditional result was False" } TASK [Verify pool subset] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34 Saturday 28 March 2026 19:24:14 -0400 (0:00:00.167) 0:01:49.422 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node12 => (item=members) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node12 => (item=volumes) TASK [Set test variables] ****************************************************** task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2 Saturday 28 March 2026 19:24:14 -0400 (0:00:00.525) 0:01:49.948 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Get the canonical device path for each member device] ******************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8 Saturday 28 March 2026 19:24:15 -0400 (0:00:00.211) 0:01:50.159 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Set pvs lvm length] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17 Saturday 28 March 2026 19:24:15 -0400 (0:00:00.277) 0:01:50.437 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Set pool pvs] ************************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22 Saturday 28 March 2026 19:24:15 -0400 (0:00:00.205) 0:01:50.643 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Verify PV count] ********************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27 Saturday 28 March 2026 19:24:15 -0400 (0:00:00.219) 0:01:50.863 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Set expected pv type] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36 Saturday 28 March 2026 19:24:15 -0400 (0:00:00.218) 0:01:51.081 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Set expected pv type - 2] ************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41 Saturday 28 March 2026 19:24:16 -0400 (0:00:00.194) 0:01:51.275 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm' and not storage_test_pool.encryption", "skip_reason": "Conditional result was False" } TASK [Set expected pv type - 3] ************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46 Saturday 28 March 2026 19:24:16 -0400 (0:00:00.200) 0:01:51.485 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:55 Saturday 28 March 2026 19:24:16 -0400 (0:00:00.236) 0:01:51.721 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Check that blivet supports PV grow to fill] ****************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:68 Saturday 28 March 2026 19:24:16 -0400 (0:00:00.206) 0:01:51.928 ******** ok: [managed-node12] => { "changed": false, "failed_when_result": false, "rc": 0 } STDERR: OpenSSH_9.9p1, OpenSSL 3.5.5 27 Jan 2026 debug1: Reading configuration data /root/.ssh/config debug1: Reading configuration data /etc/ssh/ssh_config debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug2: checking match for 'final all' host 10.31.11.27 originally 10.31.11.27 debug2: match not found debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config debug1: configuration requests final Match pass debug2: resolve_canonicalize: hostname 10.31.11.27 is address debug1: re-parsing configuration debug1: Reading configuration data /root/.ssh/config debug1: Reading configuration data /etc/ssh/ssh_config debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug2: checking match for 'final all' host 10.31.11.27 originally 10.31.11.27 debug2: match found debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config debug1: auto-mux: Trying existing master at '/root/.ansible/cp/e1141b92e1' debug2: fd 3 setting O_NONBLOCK debug2: mux_client_hello_exchange: master version 4 debug1: mux_client_request_session: master session id: 2 debug2: Received exit status from master 0 Shared connection to 10.31.11.27 closed. 
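For context, the Stratis pool and volume reported by the "Manage the pools and volumes to match the specified state" task above, and checked by the verification tasks that follow, correspond to a storage_pools definition along the following lines. This is a sketch reconstructed from the log output (pool name, disk list, volume name, size, filesystem type, and mount point are taken from the results shown above; everything else is left to the role's defaults), not the literal contents of tests_stratis.yml.

    # Minimal sketch of a playbook that would drive the storage role to the
    # state reported above. Reconstructed from the log, not the actual test.
    - hosts: managed-node12
      vars:
        storage_pools:
          - name: foo
            type: stratis
            disks: [sda, sdb, sdc, sdd, sde, sdf, sdg, sdh, sdi]
            volumes:
              - name: test1          # created as /dev/stratis/foo/test1
                size: 4g
                fs_type: xfs
                mount_point: /opt/test1
      roles:
        - fedora.linux_system_roles.storage

On the managed node, the resulting pool and filesystem should also be inspectable with the stratis CLI, e.g. "stratis pool list" and "stratis filesystem list foo".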
TASK [Verify that PVs fill the whole devices when they should] ***************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:78 Saturday 28 March 2026 19:24:18 -0400 (0:00:01.271) 0:01:53.199 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Check MD RAID] *********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:88 Saturday 28 March 2026 19:24:18 -0400 (0:00:00.168) 0:01:53.367 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node12 TASK [Get information about RAID] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8 Saturday 28 March 2026 19:24:18 -0400 (0:00:00.605) 0:01:53.972 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14 Saturday 28 March 2026 19:24:19 -0400 (0:00:00.263) 0:01:54.236 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19 Saturday 28 March 2026 19:24:19 -0400 (0:00:00.204) 0:01:54.441 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24 Saturday 28 March 2026 19:24:19 -0400 (0:00:00.186) 0:01:54.627 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set md chunk size regex] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29 Saturday 28 March 2026 19:24:19 -0400 (0:00:00.240) 0:01:54.868 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37 Saturday 28 March 2026 19:24:19 -0400 (0:00:00.210) 0:01:55.078 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46 Saturday 28 March 2026 19:24:20 
-0400 (0:00:00.220) 0:01:55.299 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55 Saturday 28 March 2026 19:24:20 -0400 (0:00:00.200) 0:01:55.499 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64 Saturday 28 March 2026 19:24:20 -0400 (0:00:00.193) 0:01:55.692 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74 Saturday 28 March 2026 19:24:20 -0400 (0:00:00.202) 0:01:55.895 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83 Saturday 28 March 2026 19:24:21 -0400 (0:00:00.263) 0:01:56.158 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_chunk_size_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:91 Saturday 28 March 2026 19:24:21 -0400 (0:00:00.280) 0:01:56.438 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node12 TASK [Validate pool member LVM RAID settings] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2 Saturday 28 March 2026 19:24:21 -0400 (0:00:00.501) 0:01:56.940 ******** skipping: [managed-node12] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'part_type': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 
'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=38081a08-cf75-4f9b-b424-e901b7e9592f', '_kernel_device': '/dev/dm-6', '_raw_kernel_device': '/dev/dm-6'}) => { "ansible_loop_var": "storage_test_lvmraid_volume", "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False", "storage_test_lvmraid_volume": { "_device": "/dev/stratis/foo/test1", "_kernel_device": "/dev/dm-6", "_mount_id": "UUID=38081a08-cf75-4f9b-b424-e901b7e9592f", "_raw_device": "/dev/stratis/foo/test1", "_raw_kernel_device": "/dev/dm-6", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } } skipping: [managed-node12] => { "changed": false } MSG: All items skipped TASK [Check Thin Pools] ******************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:94 Saturday 28 March 2026 19:24:22 -0400 (0:00:00.261) 0:01:57.207 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node12 TASK [Validate pool member thinpool settings] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2 Saturday 28 March 2026 19:24:22 -0400 (0:00:00.597) 0:01:57.805 ******** skipping: [managed-node12] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'part_type': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=38081a08-cf75-4f9b-b424-e901b7e9592f', '_kernel_device': '/dev/dm-6', '_raw_kernel_device': '/dev/dm-6'}) 
=> { "ansible_loop_var": "storage_test_thin_volume", "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False", "storage_test_thin_volume": { "_device": "/dev/stratis/foo/test1", "_kernel_device": "/dev/dm-6", "_mount_id": "UUID=38081a08-cf75-4f9b-b424-e901b7e9592f", "_raw_device": "/dev/stratis/foo/test1", "_raw_kernel_device": "/dev/dm-6", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } } skipping: [managed-node12] => { "changed": false } MSG: All items skipped TASK [Check member encryption] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:97 Saturday 28 March 2026 19:24:23 -0400 (0:00:00.296) 0:01:58.102 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node12 TASK [Set test variables] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5 Saturday 28 March 2026 19:24:23 -0400 (0:00:00.582) 0:01:58.684 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10 Saturday 28 March 2026 19:24:23 -0400 (0:00:00.232) 0:01:58.917 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.encryption", "skip_reason": "Conditional result was False" } TASK [Validate pool member crypttab entries] *********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17 Saturday 28 March 2026 19:24:24 -0400 (0:00:00.184) 0:01:59.101 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24 Saturday 28 March 2026 19:24:24 -0400 (0:00:00.198) 0:01:59.299 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_crypttab_entries": null, 
"_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:100 Saturday 28 March 2026 19:24:24 -0400 (0:00:00.221) 0:01:59.520 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node12 TASK [Validate pool member VDO settings] *************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2 Saturday 28 March 2026 19:24:25 -0400 (0:00:00.623) 0:02:00.144 ******** skipping: [managed-node12] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'part_type': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=38081a08-cf75-4f9b-b424-e901b7e9592f', '_kernel_device': '/dev/dm-6', '_raw_kernel_device': '/dev/dm-6'}) => { "ansible_loop_var": "storage_test_vdo_volume", "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False", "storage_test_vdo_volume": { "_device": "/dev/stratis/foo/test1", "_kernel_device": "/dev/dm-6", "_mount_id": "UUID=38081a08-cf75-4f9b-b424-e901b7e9592f", "_raw_device": "/dev/stratis/foo/test1", "_raw_kernel_device": "/dev/dm-6", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } } skipping: [managed-node12] => { "changed": false } MSG: All items skipped TASK [Check Stratis] *********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:103 Saturday 28 March 2026 
19:24:25 -0400 (0:00:00.343) 0:02:00.488 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node12 TASK [Get stratis pool information] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6 Saturday 28 March 2026 19:24:26 -0400 (0:00:01.237) 0:02:01.725 ******** ok: [managed-node12] => { "changed": false, "rc": 0 } STDOUT: {"name": "foo", "encrypted": false, "key_desc": null, "clevis_pin": null, "clevis_args": {}} STDERR: OpenSSH_9.9p1, OpenSSL 3.5.5 27 Jan 2026 debug1: Reading configuration data /root/.ssh/config debug1: Reading configuration data /etc/ssh/ssh_config debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug2: checking match for 'final all' host 10.31.11.27 originally 10.31.11.27 debug2: match not found debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config debug1: configuration requests final Match pass debug2: resolve_canonicalize: hostname 10.31.11.27 is address debug1: re-parsing configuration debug1: Reading configuration data /root/.ssh/config debug1: Reading configuration data /etc/ssh/ssh_config debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug2: checking match for 'final all' host 10.31.11.27 originally 10.31.11.27 debug2: match found debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config debug1: auto-mux: Trying existing master at '/root/.ansible/cp/e1141b92e1' debug2: fd 3 setting O_NONBLOCK debug2: mux_client_hello_exchange: master version 4 debug1: mux_client_request_session: master session id: 2 debug2: Received exit status from master 0 Shared connection to 10.31.11.27 closed. 
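The JSON printed on STDOUT above is what the following verification tasks consume: it is stored in the _stratis_pool_info fact and compared against the requested pool settings (pool name "foo", encryption disabled, no key description or Clevis configuration). A minimal sketch of such a check, assuming a fact shaped like the report above; the task below is illustrative and is not the test's exact source:

- name: Verify the Stratis pool report matches the requested settings
  ansible.builtin.assert:
    that:
      - _stratis_pool_info.name == 'foo'
      - not _stratis_pool_info.encrypted
      - _stratis_pool_info.key_desc is none
    msg: Stratis pool report does not match the requested pool configuration
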
TASK [Print script output] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15 Saturday 28 March 2026 19:24:27 -0400 (0:00:01.134) 0:02:02.859 ******** ok: [managed-node12] => {} MSG: {'name': 'foo', 'encrypted': False, 'key_desc': None, 'clevis_pin': None, 'clevis_args': {}} TASK [Get information about Stratis] ******************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:19 Saturday 28 March 2026 19:24:27 -0400 (0:00:00.233) 0:02:03.093 ******** ok: [managed-node12] => { "ansible_facts": { "_stratis_pool_info": { "clevis_args": {}, "clevis_pin": null, "encrypted": false, "key_desc": null, "name": "foo" } }, "changed": false } TASK [Verify that the pools was created] *************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:23 Saturday 28 March 2026 19:24:28 -0400 (0:00:00.312) 0:02:03.405 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Verify that encryption is correctly set] ********************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:30 Saturday 28 March 2026 19:24:28 -0400 (0:00:00.300) 0:02:03.705 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.encryption", "skip_reason": "Conditional result was False" } TASK [Verify that Clevis/Tang encryption is correctly set] ********************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:39 Saturday 28 March 2026 19:24:28 -0400 (0:00:00.223) 0:02:03.929 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.encryption", "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:49 Saturday 28 March 2026 19:24:29 -0400 (0:00:00.236) 0:02:04.165 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_stratis_report": null }, "changed": false } TASK [Clean up test variables] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:106 Saturday 28 March 2026 19:24:29 -0400 (0:00:00.246) 0:02:04.412 ******** ok: [managed-node12] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Verify the volumes] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3 Saturday 28 March 2026 19:24:29 -0400 (0:00:00.228) 0:02:04.640 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node12 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': 
'', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'part_type': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=38081a08-cf75-4f9b-b424-e901b7e9592f', '_kernel_device': '/dev/dm-6', '_raw_kernel_device': '/dev/dm-6'}) TASK [Set storage volume test variables] *************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2 Saturday 28 March 2026 19:24:29 -0400 (0:00:00.406) 0:02:05.047 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_volume_present": true, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [Run test verify for storage_test_volume_subset] ************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19 Saturday 28 March 2026 19:24:30 -0400 (0:00:00.358) 0:02:05.405 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node12 => (item=mount) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node12 => (item=fstab) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node12 => (item=fs) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node12 => (item=device) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node12 => (item=encryption) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node12 => (item=md) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node12 => (item=size) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node12 => (item=cache) TASK [Get expected mount device based on device type] ************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7 Saturday 28 March 2026 19:24:32 -0400 (0:00:01.804) 0:02:07.210 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_device_path": "/dev/stratis/foo/test1" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11 Saturday 28 March 2026 19:24:32 -0400 
(0:00:00.342) 0:02:07.552 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_mount_expected_mount_point": "/opt/test1", "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Get information about the mountpoint directory] ************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19 Saturday 28 March 2026 19:24:32 -0400 (0:00:00.358) 0:02:07.911 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "(not storage_test_volume.mount_user is none and storage_test_volume.mount_user | length > 0) or (not storage_test_volume.mount_group is none and storage_test_volume.mount_group | length > 0) or (not storage_test_volume.mount_mode is none and storage_test_volume.mount_mode | length > 0)", "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by device] ******************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:32 Saturday 28 March 2026 19:24:33 -0400 (0:00:00.401) 0:02:08.312 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Verify mount directory user] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:40 Saturday 28 March 2026 19:24:33 -0400 (0:00:00.264) 0:02:08.582 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "not storage_test_volume.mount_user is none", "skip_reason": "Conditional result was False" } TASK [Verify mount directory group] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:51 Saturday 28 March 2026 19:24:33 -0400 (0:00:00.252) 0:02:08.839 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "not storage_test_volume.mount_group is none", "skip_reason": "Conditional result was False" } TASK [Verify mount directory permissions] ************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:62 Saturday 28 March 2026 19:24:34 -0400 (0:00:00.286) 0:02:09.125 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "not storage_test_volume.mount_mode is none", "skip_reason": "Conditional result was False" } TASK [Get path of test volume device] ****************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:76 Saturday 28 March 2026 19:24:34 -0400 (0:00:00.379) 0:02:09.504 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:82 Saturday 28 March 2026 19:24:34 -0400 (0:00:00.241) 0:02:09.746 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:88 Saturday 28 March 2026 19:24:34 -0400 (0:00:00.184) 0:02:09.930 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Unset facts] ************************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:98 Saturday 28 March 2026 19:24:35 -0400 (0:00:00.197) 0:02:10.128 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_found_mount_stat": null, "storage_test_mount_expected_mount_point": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2 Saturday 28 March 2026 19:24:35 -0400 (0:00:00.276) 0:02:10.404 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "1", "storage_test_fstab_expected_mount_options_matches": "1", "storage_test_fstab_expected_mount_point_matches": "1", "storage_test_fstab_id_matches": [ "UUID=38081a08-cf75-4f9b-b424-e901b7e9592f " ], "storage_test_fstab_mount_options_matches": [ " /opt/test1 xfs defaults " ], "storage_test_fstab_mount_point_matches": [ " /opt/test1 " ] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17 Saturday 28 March 2026 19:24:35 -0400 (0:00:00.603) 0:02:11.007 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Verify the fstab mount point] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24 Saturday 28 March 2026 19:24:36 -0400 (0:00:00.314) 0:02:11.327 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33 Saturday 28 March 2026 19:24:36 -0400 (0:00:00.279) 0:02:11.607 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "__storage_verify_mount_options | d(false)", "skip_reason": "Conditional result was False" } TASK [Verify fingerprint] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45 Saturday 28 March 2026 19:24:36 -0400 (0:00:00.192) 0:02:11.800 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Clean up variables] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52 Saturday 28 March 2026 19:24:36 -0400 (0:00:00.256) 0:02:12.056 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, 
"storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6 Saturday 28 March 2026 19:24:37 -0400 (0:00:00.230) 0:02:12.286 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type != \"stratis\"", "skip_reason": "Conditional result was False" } TASK [Verify fs label] ********************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14 Saturday 28 March 2026 19:24:37 -0400 (0:00:00.227) 0:02:12.513 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type != \"stratis\"", "skip_reason": "Conditional result was False" } TASK [See whether the device node is present] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3 Saturday 28 March 2026 19:24:37 -0400 (0:00:00.200) 0:02:12.714 ******** ok: [managed-node12] => { "changed": false, "stat": { "atime": 1774740232.0172572, "attr_flags": "", "attributes": [], "block_size": 512, "blocks": 0, "charset": "binary", "ctime": 1774740232.0172572, "dev": 6, "device_type": 64774, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 1496, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/symlink", "mode": "0660", "mtime": 1774740232.0172572, "nlink": 1, "path": "/dev/stratis/foo/test1", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9 Saturday 28 March 2026 19:24:38 -0400 (0:00:01.079) 0:02:13.793 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Verify the presence/absence of the device node - 2] ********************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16 Saturday 28 March 2026 19:24:39 -0400 (0:00:00.373) 0:02:14.167 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "not (_storage_test_volume_present or storage_test_volume.type == 'disk')", "skip_reason": "Conditional result was False" } TASK [Make sure we got info about this volume] ********************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23 Saturday 28 March 2026 19:24:39 -0400 (0:00:00.191) 0:02:14.358 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Process volume type (set initial value) (1/2)] *************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29 
Saturday 28 March 2026 19:24:39 -0400 (0:00:00.239) 0:02:14.597 ******** ok: [managed-node12] => { "ansible_facts": { "st_volume_type": "stratis" }, "changed": false } TASK [Process volume type (get RAID value) (2/2)] ****************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33 Saturday 28 March 2026 19:24:39 -0400 (0:00:00.242) 0:02:14.839 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == \"raid\"", "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38 Saturday 28 March 2026 19:24:39 -0400 (0:00:00.223) 0:02:15.063 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Stat the LUKS device, if encrypted] ************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3 Saturday 28 March 2026 19:24:40 -0400 (0:00:00.332) 0:02:15.395 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10 Saturday 28 March 2026 19:24:40 -0400 (0:00:00.230) 0:02:15.626 ******** ok: [managed-node12] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: cryptsetup TASK [Collect LUKS info for this volume] *************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16 Saturday 28 March 2026 19:24:42 -0400 (0:00:01.573) 0:02:17.199 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.encryption and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22 Saturday 28 March 2026 19:24:42 -0400 (0:00:00.228) 0:02:17.427 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29 Saturday 28 March 2026 19:24:42 -0400 (0:00:00.198) 0:02:17.625 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40 Saturday 28 March 2026 19:24:42 -0400 (0:00:00.362) 0:02:17.988 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if 
encrypted] *********************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46 Saturday 28 March 2026 19:24:43 -0400 (0:00:00.203) 0:02:18.191 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51 Saturday 28 March 2026 19:24:43 -0400 (0:00:00.212) 0:02:18.403 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:64 Saturday 28 March 2026 19:24:43 -0400 (0:00:00.205) 0:02:18.619 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:77 Saturday 28 March 2026 19:24:43 -0400 (0:00:00.223) 0:02:18.843 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Set test variables] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:90 Saturday 28 March 2026 19:24:43 -0400 (0:00:00.234) 0:02:19.077 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:96 Saturday 28 March 2026 19:24:44 -0400 (0:00:00.360) 0:02:19.438 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:103 Saturday 28 March 2026 19:24:44 -0400 (0:00:00.308) 0:02:19.747 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:111 Saturday 28 March 2026 19:24:44 -0400 (0:00:00.238) 0:02:19.986 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** 
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:119 Saturday 28 March 2026 19:24:45 -0400 (0:00:00.283) 0:02:20.269 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:127 Saturday 28 March 2026 19:24:45 -0400 (0:00:00.243) 0:02:20.512 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [Get information about RAID] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8 Saturday 28 March 2026 19:24:45 -0400 (0:00:00.279) 0:02:20.792 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14 Saturday 28 March 2026 19:24:45 -0400 (0:00:00.203) 0:02:20.995 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19 Saturday 28 March 2026 19:24:46 -0400 (0:00:00.210) 0:02:21.206 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24 Saturday 28 March 2026 19:24:46 -0400 (0:00:00.203) 0:02:21.409 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set chunk size regex] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29 Saturday 28 March 2026 19:24:46 -0400 (0:00:00.186) 0:02:21.596 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37 Saturday 28 March 2026 19:24:46 -0400 (0:00:00.201) 0:02:21.797 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46 Saturday 28 March 2026 19:24:46 -0400 (0:00:00.187) 0:02:21.985 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54 Saturday 28 March 2026 19:24:47 -0400 (0:00:00.225) 0:02:22.210 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62 Saturday 28 March 2026 19:24:47 -0400 (0:00:00.230) 0:02:22.441 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70 Saturday 28 March 2026 19:24:47 -0400 (0:00:00.215) 0:02:22.656 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Parse the actual size of the volume] ************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3 Saturday 28 March 2026 19:24:47 -0400 (0:00:00.208) 0:02:22.865 ******** ok: [managed-node12] => { "bytes": 4294967296, "changed": false, "lvm": "4g", "parted": "4GiB", "size": "4 GiB" } TASK [Parse the requested size of the volume] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11 Saturday 28 March 2026 19:24:49 -0400 (0:00:01.980) 0:02:24.846 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == \"lvm\"", "skip_reason": "Conditional result was False" } TASK [Establish base value for expected size] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20 Saturday 28 March 2026 19:24:49 -0400 (0:00:00.191) 0:02:25.037 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == \"lvm\"", "skip_reason": "Conditional result was False" } TASK [Show expected size] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28 Saturday 28 March 2026 19:24:50 -0400 (0:00:00.178) 0:02:25.216 ******** ok: [managed-node12] => { "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined" } TASK [Get the size of parent/pool device] ************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32 Saturday 28 March 2026 19:24:50 -0400 (0:00:00.216) 
0:02:25.433 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == \"lvm\"", "skip_reason": "Conditional result was False" } TASK [Show test pool] ********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46 Saturday 28 March 2026 19:24:50 -0400 (0:00:00.223) 0:02:25.656 ******** skipping: [managed-node12] => { "false_condition": "storage_test_volume.type == \"lvm\"" } TASK [Show test blockinfo] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50 Saturday 28 March 2026 19:24:50 -0400 (0:00:00.200) 0:02:25.856 ******** skipping: [managed-node12] => { "false_condition": "storage_test_volume.type == \"lvm\"" } TASK [Show test pool size] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54 Saturday 28 March 2026 19:24:50 -0400 (0:00:00.221) 0:02:26.078 ******** skipping: [managed-node12] => { "false_condition": "storage_test_volume.type == \"lvm\"" } TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58 Saturday 28 March 2026 19:24:51 -0400 (0:00:00.221) 0:02:26.299 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == \"lvm\"", "skip_reason": "Conditional result was False" } TASK [Default thin pool reserved space values] ********************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:68 Saturday 28 March 2026 19:24:51 -0400 (0:00:00.184) 0:02:26.484 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Default minimal thin pool reserved space size] *************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:72 Saturday 28 March 2026 19:24:51 -0400 (0:00:00.245) 0:02:26.729 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Default maximal thin pool reserved space size] *************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:77 Saturday 28 March 2026 19:24:51 -0400 (0:00:00.227) 0:02:26.957 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Calculate maximum usable space in thin pool] ***************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:83 Saturday 28 March 2026 19:24:52 -0400 (0:00:00.360) 0:02:27.317 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Apply upper size limit to max usable thin pool space] ******************** task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:87 Saturday 28 March 2026 19:24:52 -0400 (0:00:00.232) 0:02:27.550 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Apply lower size limit to max usable thin pool space] ******************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:92 Saturday 28 March 2026 19:24:52 -0400 (0:00:00.277) 0:02:27.828 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Convert maximum usable thin pool space from int to Size] ***************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:97 Saturday 28 March 2026 19:24:52 -0400 (0:00:00.254) 0:02:28.083 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Show max thin pool size] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:102 Saturday 28 March 2026 19:24:53 -0400 (0:00:00.251) 0:02:28.334 ******** skipping: [managed-node12] => { "false_condition": "storage_test_volume.thin | bool" } TASK [Show volume thin pool size] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:106 Saturday 28 March 2026 19:24:53 -0400 (0:00:00.261) 0:02:28.596 ******** skipping: [managed-node12] => { "false_condition": "storage_test_volume.thin | bool" } TASK [Show test volume size] *************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:110 Saturday 28 March 2026 19:24:53 -0400 (0:00:00.268) 0:02:28.865 ******** skipping: [managed-node12] => { "false_condition": "storage_test_volume.thin | bool" } TASK [Establish base value for expected thin pool size] ************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:114 Saturday 28 March 2026 19:24:54 -0400 (0:00:00.268) 0:02:29.133 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Calculate the expected size based on pool size and percentage value - 2] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:121 Saturday 28 March 2026 19:24:54 -0400 (0:00:00.315) 0:02:29.448 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Establish base value for expected thin pool volume size] ***************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:128 Saturday 28 March 2026 19:24:54 -0400 (0:00:00.353) 0:02:29.802 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": 
"Conditional result was False" } TASK [Calculate the expected thin pool volume size based on percentage value] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:132 Saturday 28 March 2026 19:24:54 -0400 (0:00:00.314) 0:02:30.116 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Replace expected volume size with calculated value] ********************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:138 Saturday 28 March 2026 19:24:55 -0400 (0:00:00.274) 0:02:30.391 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Show actual size] ******************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:144 Saturday 28 March 2026 19:24:55 -0400 (0:00:00.458) 0:02:30.849 ******** ok: [managed-node12] => { "storage_test_actual_size": { "bytes": 4294967296, "changed": false, "failed": false, "lvm": "4g", "parted": "4GiB", "size": "4 GiB" } } TASK [Show expected size - 2] ************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:148 Saturday 28 March 2026 19:24:56 -0400 (0:00:00.316) 0:02:31.165 ******** ok: [managed-node12] => { "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined" } TASK [Assert expected size is actual size] ************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:152 Saturday 28 March 2026 19:24:56 -0400 (0:00:00.301) 0:02:31.466 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == \"lvm\"", "skip_reason": "Conditional result was False" } TASK [Get information about the LV] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5 Saturday 28 March 2026 19:24:56 -0400 (0:00:00.245) 0:02:31.712 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13 Saturday 28 March 2026 19:24:56 -0400 (0:00:00.244) 0:02:31.956 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Check segment type] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17 Saturday 28 March 2026 19:24:57 -0400 (0:00:00.247) 0:02:32.204 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional 
result was False" } TASK [Set LV cache size] ******************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24 Saturday 28 March 2026 19:24:57 -0400 (0:00:00.257) 0:02:32.462 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Parse the requested cache size] ****************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31 Saturday 28 March 2026 19:24:57 -0400 (0:00:00.240) 0:02:32.702 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Set expected cache size] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37 Saturday 28 March 2026 19:24:57 -0400 (0:00:00.260) 0:02:32.962 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42 Saturday 28 March 2026 19:24:58 -0400 (0:00:00.229) 0:02:33.191 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25 Saturday 28 March 2026 19:24:58 -0400 (0:00:00.250) 0:02:33.442 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:43 Saturday 28 March 2026 19:24:58 -0400 (0:00:00.316) 0:02:33.758 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Clean up variable namespace] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:52 Saturday 28 March 2026 19:24:58 -0400 (0:00:00.229) 0:02:33.987 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Repeat the previous invocation to verify idempotence] ******************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:75 Saturday 28 March 2026 19:24:59 -0400 (0:00:00.256) 0:02:34.244 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml for managed-node12 TASK [Clear facts] ************************************************************* task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml:10 Saturday 28 March 2026 19:24:59 -0400 (0:00:00.427) 0:02:34.672 ******** META: facts cleared TASK [Run the role] ************************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml:24 Saturday 28 March 2026 19:24:59 -0400 (0:00:00.077) 0:02:34.750 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "__sr_failed_when is defined", "skip_reason": "Conditional result was False" } TASK [Run the role normally] *************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml:34 Saturday 28 March 2026 19:24:59 -0400 (0:00:00.193) 0:02:34.943 ******** included: fedora.linux_system_roles.storage for managed-node12 TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Saturday 28 March 2026 19:25:00 -0400 (0:00:00.332) 0:02:35.276 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node12 TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Saturday 28 March 2026 19:25:00 -0400 (0:00:00.248) 0:02:35.524 ******** ok: [managed-node12] TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Saturday 28 March 2026 19:25:02 -0400 (0:00:02.094) 0:02:37.618 ******** skipping: [managed-node12] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node12] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node12] => (item=CentOS_10.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-fs", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "xfsprogs", "stratisd", "stratis-cli", "{{ 'libblockdev-s390' if ansible_facts['architecture'] == 's390x' else 'libblockdev' }}", "vdo" ] }, "ansible_included_var_files": [ "/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node12] => (item=CentOS_10.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-fs", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "xfsprogs", "stratisd", "stratis-cli", "{{ 'libblockdev-s390' if ansible_facts['architecture'] == 's390x' else 'libblockdev' }}", "vdo" ] }, "ansible_included_var_files": [ "/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK 
[fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Saturday 28 March 2026 19:25:03 -0400 (0:00:01.216) 0:02:38.835 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "not __storage_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Saturday 28 March 2026 19:25:04 -0400 (0:00:01.109) 0:02:39.945 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "not __storage_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Saturday 28 March 2026 19:25:05 -0400 (0:00:00.252) 0:02:40.197 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Saturday 28 March 2026 19:25:05 -0400 (0:00:00.247) 0:02:40.449 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Saturday 28 March 2026 19:25:05 -0400 (0:00:00.227) 0:02:40.676 ******** redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node12 TASK [fedora.linux_system_roles.storage : Add repo key] ************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Saturday 28 March 2026 19:25:06 -0400 (0:00:00.693) 0:02:41.370 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_blivet_custom_repo.key is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Add blivet repo] ********************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:15 Saturday 28 March 2026 19:25:06 -0400 (0:00:00.243) 0:02:41.613 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_blivet_custom_repo.key is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:20 Saturday 28 March 2026 19:25:06 -0400 (0:00:00.265) 0:02:41.879 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** 
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:27 Saturday 28 March 2026 19:25:07 -0400 (0:00:00.261) 0:02:42.141 ******** ok: [managed-node12] => { "storage_pools | d([])": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "name": "foo", "type": "stratis", "volumes": [ { "mount_point": "/opt/test1", "name": "test1", "size": "4g" } ] } ] } TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:32 Saturday 28 March 2026 19:25:07 -0400 (0:00:00.319) 0:02:42.460 ******** ok: [managed-node12] => { "storage_volumes | d([])": [] } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37 Saturday 28 March 2026 19:25:07 -0400 (0:00:00.272) 0:02:42.733 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:50 Saturday 28 March 2026 19:25:07 -0400 (0:00:00.336) 0:02:43.069 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:56 Saturday 28 March 2026 19:25:08 -0400 (0:00:00.229) 0:02:43.299 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:76 Saturday 28 March 2026 19:25:08 -0400 (0:00:00.251) 0:02:43.564 ******** ok: [managed-node12] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "apt-daily.service": { "name": "apt-daily.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": 
"not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fcoe.service": { "name": "fcoe.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", 
"status": "enabled" }, "iscsi-shutdown.service": { "name": "iscsi-shutdown.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsi.service": { "name": "iscsi.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsid.service": { "name": "iscsid.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-activation-early.service": { "name": "lvm2-activation-early.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_multipath.service": { "name": "modprobe@dm_multipath.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, 
"modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "raid-check.service": { "name": "raid-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "rbdmap.service": { "name": "rbdmap.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": 
"not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-unix-local@.service": { "name": "sshd-unix-local@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd-vsock@.service": { "name": "sshd-vsock@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "stratis-fstab-setup-with-network@.service": { "name": 
"stratis-fstab-setup-with-network@.service", "source": "systemd", "state": "unknown", "status": "static" }, "stratis-fstab-setup@.service": { "name": "stratis-fstab-setup@.service", "source": "systemd", "state": "unknown", "status": "static" }, "stratisd-min-postinitrd.service": { "name": "stratisd-min-postinitrd.service", "source": "systemd", "state": "inactive", "status": "static" }, "stratisd.service": { "name": "stratisd.service", "source": "systemd", "state": "running", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", 
"status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": 
"systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, 
"systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-userdbd.service": { "name": "systemd-userdbd.service", "source": "systemd", "state": "running", "status": "indirect" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", 
"source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:82 Saturday 28 March 2026 19:25:11 -0400 (0:00:03.207) 0:02:46.772 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:88 Saturday 28 March 2026 19:25:12 -0400 (0:00:00.445) 0:02:47.217 ******** ok: [managed-node12] => { "actions": [], "changed": false, "crypts": [], "leaves": [ "/dev/stratis/foo/test1", "/dev/xvda1", "/dev/xvda2" ], "mounts": [ { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "UUID=38081a08-cf75-4f9b-b424-e901b7e9592f", "state": "mounted" } ], "packages": [ "xfsprogs", "stratis-cli", "stratisd" ], "pools": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "stratis", "volumes": [ { "_device": "/dev/stratis/foo/test1", "_kernel_device": "/dev/dm-6", "_mount_id": "UUID=38081a08-cf75-4f9b-b424-e901b7e9592f", "_raw_device": "/dev/stratis/foo/test1", "_raw_kernel_device": "/dev/dm-6", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:103 Saturday 28 March 2026 19:25:20 -0400 (0:00:08.155) 0:02:55.373 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_udevadm_trigger | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ****** task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:110 Saturday 28 March 2026 19:25:20 -0400 (0:00:00.160) 0:02:55.534 ******** ok: [managed-node12] => { "changed": false, "stat": { "atime": 1774740241.5293837, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "b4ba8a954ffa41e5183e741617ef38130014deca", "ctime": 1774740241.5267115, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 230686985, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1774740241.5267115, "nlink": 1, "path": "/etc/fstab", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1478, "uid": 0, "version": "1679064280", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:115 Saturday 28 March 2026 19:25:21 -0400 (0:00:01.072) 0:02:56.606 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "blivet_output is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:133 Saturday 28 March 2026 19:25:21 -0400 (0:00:00.299) 0:02:56.906 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:139 Saturday 28 March 2026 19:25:22 -0400 (0:00:00.501) 0:02:57.407 ******** ok: [managed-node12] => { "blivet_output": { "actions": [], "changed": false, "crypts": [], "failed": false, "leaves": [ "/dev/stratis/foo/test1", "/dev/xvda1", "/dev/xvda2" ], "mounts": [ { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "UUID=38081a08-cf75-4f9b-b424-e901b7e9592f", "state": "mounted" } ], "packages": [ "xfsprogs", "stratis-cli", "stratisd" ], "pools": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "stratis", "volumes": [ { "_device": "/dev/stratis/foo/test1", "_kernel_device": "/dev/dm-6", "_mount_id": "UUID=38081a08-cf75-4f9b-b424-e901b7e9592f", "_raw_device": "/dev/stratis/foo/test1", "_raw_kernel_device": "/dev/dm-6", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], 
"encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148 Saturday 28 March 2026 19:25:22 -0400 (0:00:00.334) 0:02:57.742 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "stratis", "volumes": [ { "_device": "/dev/stratis/foo/test1", "_kernel_device": "/dev/dm-6", "_mount_id": "UUID=38081a08-cf75-4f9b-b424-e901b7e9592f", "_raw_device": "/dev/stratis/foo/test1", "_raw_kernel_device": "/dev/dm-6", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:152 Saturday 28 March 2026 19:25:22 -0400 (0:00:00.305) 0:02:58.047 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] ************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:168 Saturday 28 
March 2026 19:25:23 -0400 (0:00:00.296) 0:02:58.344 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:179 Saturday 28 March 2026 19:25:23 -0400 (0:00:00.361) 0:02:58.705 ******** ok: [managed-node12] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Set up new/current mounts] *********** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:184 Saturday 28 March 2026 19:25:25 -0400 (0:00:01.587) 0:03:00.293 ******** redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount ok: [managed-node12] => (item={'src': 'UUID=38081a08-cf75-4f9b-b424-e901b7e9592f', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => { "ansible_loop_var": "mount_info", "backup_file": "", "boot": "yes", "changed": false, "dump": "0", "fstab": "/etc/fstab", "fstype": "xfs", "mount_info": { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "UUID=38081a08-cf75-4f9b-b424-e901b7e9592f", "state": "mounted" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "UUID=38081a08-cf75-4f9b-b424-e901b7e9592f" } TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195 Saturday 28 March 2026 19:25:26 -0400 (0:00:01.323) 0:03:01.616 ******** skipping: [managed-node12] => (item={'src': 'UUID=38081a08-cf75-4f9b-b424-e901b7e9592f', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => { "ansible_loop_var": "mount_info", "changed": false, "false_condition": "mount_info['owner'] != none or mount_info['group'] != none or mount_info['mode'] != none", "mount_info": { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "UUID=38081a08-cf75-4f9b-b424-e901b7e9592f", "state": "mounted" }, "skip_reason": "Conditional result was False" } skipping: [managed-node12] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:207 Saturday 28 March 2026 19:25:26 -0400 (0:00:00.370) 0:03:01.986 ******** ok: [managed-node12] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:215 Saturday 28 March 2026 19:25:28 -0400 (0:00:01.505) 0:03:03.491 ******** ok: [managed-node12] => { "changed": false, "stat": { "atime": 1774737762.6954868, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1774342704.475, "dev": 51714, 
"device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 4194435, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1774342323.629, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "1224473831", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:220 Saturday 28 March 2026 19:25:29 -0400 (0:00:01.054) 0:03:04.546 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:242 Saturday 28 March 2026 19:25:29 -0400 (0:00:00.217) 0:03:04.763 ******** ok: [managed-node12] TASK [Verify role results - 2] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:87 Saturday 28 March 2026 19:25:31 -0400 (0:00:01.901) 0:03:06.665 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node12 TASK [Print out pool information] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2 Saturday 28 March 2026 19:25:31 -0400 (0:00:00.252) 0:03:06.917 ******** ok: [managed-node12] => { "_storage_pools_list": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "stratis", "volumes": [ { "_device": "/dev/stratis/foo/test1", "_kernel_device": "/dev/dm-6", "_mount_id": "UUID=38081a08-cf75-4f9b-b424-e901b7e9592f", "_raw_device": "/dev/stratis/foo/test1", "_raw_kernel_device": "/dev/dm-6", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, 
"raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } ] } ] } TASK [Print out volume information] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7 Saturday 28 March 2026 19:25:32 -0400 (0:00:00.249) 0:03:07.167 ******** skipping: [managed-node12] => { "false_condition": "_storage_volumes_list | length > 0" } TASK [Collect info about the volumes.] ***************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15 Saturday 28 March 2026 19:25:32 -0400 (0:00:00.186) 0:03:07.355 ******** ok: [managed-node12] => { "changed": false, "info": { "/dev/loop0": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/loop0", "size": "0B", "type": "loop", "uuid": "" }, "/dev/mapper/stratis-1-private-98c86bd3ff4a4ace81fea7a7d8071f6f-flex-mdv": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/mapper/stratis-1-private-98c86bd3ff4a4ace81fea7a7d8071f6f-flex-mdv", "size": "512M", "type": "stratis", "uuid": "" }, "/dev/mapper/stratis-1-private-98c86bd3ff4a4ace81fea7a7d8071f6f-flex-thindata": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/mapper/stratis-1-private-98c86bd3ff4a4ace81fea7a7d8071f6f-flex-thindata", "size": "50G", "type": "stratis", "uuid": "" }, "/dev/mapper/stratis-1-private-98c86bd3ff4a4ace81fea7a7d8071f6f-flex-thinmeta": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/mapper/stratis-1-private-98c86bd3ff4a4ace81fea7a7d8071f6f-flex-thinmeta", "size": "786M", "type": "stratis", "uuid": "" }, "/dev/mapper/stratis-1-private-98c86bd3ff4a4ace81fea7a7d8071f6f-physical-cache": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/mapper/stratis-1-private-98c86bd3ff4a4ace81fea7a7d8071f6f-physical-cache", "size": "52.1G", "type": "stratis", "uuid": "" }, "/dev/mapper/stratis-1-private-98c86bd3ff4a4ace81fea7a7d8071f6f-physical-originsub": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/mapper/stratis-1-private-98c86bd3ff4a4ace81fea7a7d8071f6f-physical-originsub", "size": "52.1G", "type": "stratis", "uuid": "" }, "/dev/mapper/stratis-1-private-98c86bd3ff4a4ace81fea7a7d8071f6f-thinpool-pool": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/mapper/stratis-1-private-98c86bd3ff4a4ace81fea7a7d8071f6f-thinpool-pool", "size": "50G", "type": "stratis", "uuid": "" }, "/dev/sda": { "fstype": "stratis", "label": "", "mountpoint": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "26de8ef7-1afd-484c-8227-390830b9f791" }, "/dev/sdb": { "fstype": "stratis", "label": "", "mountpoint": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "c1b3a6f2-c13b-473a-8a6e-3ef837ed2f6e" }, "/dev/sdc": { "fstype": "stratis", "label": "", "mountpoint": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "41fb917c-951e-41b2-8c83-03a53bb3c710" }, "/dev/sdd": { "fstype": "stratis", "label": "", "mountpoint": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "926c9bd5-a194-4984-9b26-e2a194676c27" }, "/dev/sde": { "fstype": "stratis", "label": "", "mountpoint": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "755bef06-2819-4fe2-8faf-f6eee9e80010" }, "/dev/sdf": { "fstype": "stratis", "label": "", "mountpoint": "", "name": "/dev/sdf", 
"size": "10G", "type": "disk", "uuid": "9489957b-dcba-4642-bfad-4733daeb8844" }, "/dev/sdg": { "fstype": "stratis", "label": "", "mountpoint": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "9795169f-078b-4914-a09d-19e69dc3300e" }, "/dev/sdh": { "fstype": "stratis", "label": "", "mountpoint": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "b094d332-a66e-4a40-ab09-7ae241f2aeff" }, "/dev/sdi": { "fstype": "stratis", "label": "", "mountpoint": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "46644e71-1703-4adf-beee-bda271fc3b06" }, "/dev/stratis/foo/test1": { "fstype": "xfs", "label": "", "mountpoint": "/opt/test1", "name": "/dev/stratis/foo/test1", "size": "4G", "type": "stratis", "uuid": "38081a08-cf75-4f9b-b424-e901b7e9592f" }, "/dev/xvda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda1", "size": "1M", "type": "partition", "uuid": "" }, "/dev/xvda2": { "fstype": "xfs", "label": "", "mountpoint": "/", "name": "/dev/xvda2", "size": "250G", "type": "partition", "uuid": "94fc577e-f1df-44bb-8e86-63b9a68f8f7f" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20 Saturday 28 March 2026 19:25:33 -0400 (0:00:01.134) 0:03:08.489 ******** ok: [managed-node12] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.002997", "end": "2026-03-28 19:25:34.307211", "rc": 0, "start": "2026-03-28 19:25:34.304214" } STDOUT: # system_role:storage # # /etc/fstab # Created by anaconda on Tue Mar 24 08:52:03 2026 # # Accessible filesystems, by reference, are maintained under '/dev/disk/'. # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info. # # After editing this file, run 'systemctl daemon-reload' to update systemd # units generated from this file. 
# UUID=94fc577e-f1df-44bb-8e86-63b9a68f8f7f / xfs defaults 0 0 ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 UUID=38081a08-cf75-4f9b-b424-e901b7e9592f /opt/test1 xfs defaults 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25 Saturday 28 March 2026 19:25:34 -0400 (0:00:01.098) 0:03:09.588 ******** ok: [managed-node12] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.003486", "end": "2026-03-28 19:25:35.300586", "failed_when_result": false, "rc": 0, "start": "2026-03-28 19:25:35.297100" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34 Saturday 28 March 2026 19:25:35 -0400 (0:00:01.021) 0:03:10.620 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node12 => (item={'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'present', 'type': 'stratis', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'part_type': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, 
'_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=38081a08-cf75-4f9b-b424-e901b7e9592f', '_kernel_device': '/dev/dm-6', '_raw_kernel_device': '/dev/dm-6'}]}) TASK [Set _storage_pool_tests] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5 Saturday 28 March 2026 19:25:36 -0400 (0:00:00.511) 0:03:11.132 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [Get VG shared value status] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18 Saturday 28 March 2026 19:25:36 -0400 (0:00:00.258) 0:03:11.390 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'", "skip_reason": "Conditional result was False" } TASK [Verify that VG shared value checks out] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24 Saturday 28 March 2026 19:25:36 -0400 (0:00:00.210) 0:03:11.601 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'", "skip_reason": "Conditional result was False" } TASK [Verify pool subset] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34 Saturday 28 March 2026 19:25:36 -0400 (0:00:00.236) 0:03:11.837 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node12 => (item=members) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node12 => (item=volumes) TASK [Set test variables] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2 Saturday 28 March 2026 19:25:37 -0400 (0:00:00.524) 0:03:12.362 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Get the canonical device path for each member device] ******************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8 Saturday 28 March 2026 19:25:37 -0400 (0:00:00.251) 0:03:12.613 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Set pvs lvm length] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17 Saturday 28 March 2026 19:25:37 -0400 (0:00:00.194) 0:03:12.807 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Set pool pvs] ************************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22 Saturday 28 March 
2026 19:25:37 -0400 (0:00:00.236) 0:03:13.044 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Verify PV count] ********************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27 Saturday 28 March 2026 19:25:38 -0400 (0:00:00.193) 0:03:13.237 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Set expected pv type] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36 Saturday 28 March 2026 19:25:38 -0400 (0:00:00.221) 0:03:13.458 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Set expected pv type - 2] ************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41 Saturday 28 March 2026 19:25:38 -0400 (0:00:00.203) 0:03:13.662 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm' and not storage_test_pool.encryption", "skip_reason": "Conditional result was False" } TASK [Set expected pv type - 3] ************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46 Saturday 28 March 2026 19:25:38 -0400 (0:00:00.232) 0:03:13.895 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:55 Saturday 28 March 2026 19:25:39 -0400 (0:00:00.230) 0:03:14.125 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Check that blivet supports PV grow to fill] ****************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:68 Saturday 28 March 2026 19:25:39 -0400 (0:00:00.166) 0:03:14.291 ******** ok: [managed-node12] => { "changed": false, "failed_when_result": false, "rc": 0 } STDERR: OpenSSH_9.9p1, OpenSSL 3.5.5 27 Jan 2026 debug1: Reading configuration data /root/.ssh/config debug1: Reading configuration data /etc/ssh/ssh_config debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug2: checking match for 'final all' host 10.31.11.27 originally 10.31.11.27 debug2: match not found debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config debug1: configuration requests final Match pass debug2: resolve_canonicalize: hostname 10.31.11.27 is address debug1: re-parsing configuration debug1: Reading configuration data /root/.ssh/config debug1: Reading configuration data /etc/ssh/ssh_config debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug2: checking match for 'final all' host 10.31.11.27 originally 10.31.11.27 debug2: match found debug1: Reading 
configuration data /etc/crypto-policies/back-ends/openssh.config debug1: auto-mux: Trying existing master at '/root/.ansible/cp/e1141b92e1' debug2: fd 3 setting O_NONBLOCK debug2: mux_client_hello_exchange: master version 4 debug1: mux_client_request_session: master session id: 2 debug2: Received exit status from master 0 Shared connection to 10.31.11.27 closed. TASK [Verify that PVs fill the whole devices when they should] ***************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:78 Saturday 28 March 2026 19:25:40 -0400 (0:00:01.085) 0:03:15.377 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Check MD RAID] *********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:88 Saturday 28 March 2026 19:25:40 -0400 (0:00:00.314) 0:03:15.691 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node12 TASK [Get information about RAID] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8 Saturday 28 March 2026 19:25:41 -0400 (0:00:00.570) 0:03:16.262 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14 Saturday 28 March 2026 19:25:41 -0400 (0:00:00.189) 0:03:16.451 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19 Saturday 28 March 2026 19:25:41 -0400 (0:00:00.196) 0:03:16.655 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24 Saturday 28 March 2026 19:25:41 -0400 (0:00:00.222) 0:03:16.878 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set md chunk size regex] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29 Saturday 28 March 2026 19:25:42 -0400 (0:00:00.246) 0:03:17.124 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37 Saturday 28 March 2026 19:25:42 -0400 (0:00:00.207) 0:03:17.332 ******** skipping: 
[managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46 Saturday 28 March 2026 19:25:42 -0400 (0:00:00.186) 0:03:17.518 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55 Saturday 28 March 2026 19:25:42 -0400 (0:00:00.213) 0:03:17.732 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64 Saturday 28 March 2026 19:25:42 -0400 (0:00:00.171) 0:03:17.903 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74 Saturday 28 March 2026 19:25:43 -0400 (0:00:00.213) 0:03:18.117 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83 Saturday 28 March 2026 19:25:43 -0400 (0:00:00.209) 0:03:18.326 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_chunk_size_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:91 Saturday 28 March 2026 19:25:43 -0400 (0:00:00.165) 0:03:18.491 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node12 TASK [Validate pool member LVM RAID settings] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2 Saturday 28 March 2026 19:25:43 -0400 (0:00:00.489) 0:03:18.981 ******** skipping: [managed-node12] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': 
[], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'part_type': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=38081a08-cf75-4f9b-b424-e901b7e9592f', '_kernel_device': '/dev/dm-6', '_raw_kernel_device': '/dev/dm-6'}) => { "ansible_loop_var": "storage_test_lvmraid_volume", "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False", "storage_test_lvmraid_volume": { "_device": "/dev/stratis/foo/test1", "_kernel_device": "/dev/dm-6", "_mount_id": "UUID=38081a08-cf75-4f9b-b424-e901b7e9592f", "_raw_device": "/dev/stratis/foo/test1", "_raw_kernel_device": "/dev/dm-6", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } } skipping: [managed-node12] => { "changed": false } MSG: All items skipped TASK [Check Thin Pools] ******************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:94 Saturday 28 March 2026 19:25:44 -0400 (0:00:00.342) 0:03:19.324 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node12 TASK [Validate pool member thinpool settings] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2 Saturday 28 March 2026 19:25:44 -0400 (0:00:00.517) 0:03:19.841 ******** skipping: [managed-node12] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'part_type': None, 'raid_disks': [], 'raid_stripe_size': None, 
'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=38081a08-cf75-4f9b-b424-e901b7e9592f', '_kernel_device': '/dev/dm-6', '_raw_kernel_device': '/dev/dm-6'}) => { "ansible_loop_var": "storage_test_thin_volume", "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False", "storage_test_thin_volume": { "_device": "/dev/stratis/foo/test1", "_kernel_device": "/dev/dm-6", "_mount_id": "UUID=38081a08-cf75-4f9b-b424-e901b7e9592f", "_raw_device": "/dev/stratis/foo/test1", "_raw_kernel_device": "/dev/dm-6", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } } skipping: [managed-node12] => { "changed": false } MSG: All items skipped TASK [Check member encryption] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:97 Saturday 28 March 2026 19:25:45 -0400 (0:00:00.295) 0:03:20.136 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node12 TASK [Set test variables] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5 Saturday 28 March 2026 19:25:45 -0400 (0:00:00.660) 0:03:20.797 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10 Saturday 28 March 2026 19:25:45 -0400 (0:00:00.207) 0:03:21.004 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Validate pool member crypttab entries] *********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17 Saturday 28 March 2026 19:25:46 -0400 (0:00:00.151) 
0:03:21.155 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24 Saturday 28 March 2026 19:25:46 -0400 (0:00:00.195) 0:03:21.351 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:100 Saturday 28 March 2026 19:25:46 -0400 (0:00:00.283) 0:03:21.635 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node12 TASK [Validate pool member VDO settings] *************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2 Saturday 28 March 2026 19:25:47 -0400 (0:00:00.652) 0:03:22.288 ******** skipping: [managed-node12] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'part_type': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=38081a08-cf75-4f9b-b424-e901b7e9592f', '_kernel_device': '/dev/dm-6', '_raw_kernel_device': '/dev/dm-6'}) => { "ansible_loop_var": "storage_test_vdo_volume", "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False", "storage_test_vdo_volume": { "_device": "/dev/stratis/foo/test1", "_kernel_device": "/dev/dm-6", "_mount_id": "UUID=38081a08-cf75-4f9b-b424-e901b7e9592f", "_raw_device": "/dev/stratis/foo/test1", "_raw_kernel_device": "/dev/dm-6", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, 
"raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } } skipping: [managed-node12] => { "changed": false } MSG: All items skipped TASK [Check Stratis] *********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:103 Saturday 28 March 2026 19:25:47 -0400 (0:00:00.306) 0:03:22.594 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node12 TASK [Get stratis pool information] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6 Saturday 28 March 2026 19:25:48 -0400 (0:00:00.781) 0:03:23.376 ******** ok: [managed-node12] => { "changed": false, "rc": 0 } STDOUT: {"name": "foo", "encrypted": false, "key_desc": null, "clevis_pin": null, "clevis_args": {}} STDERR: OpenSSH_9.9p1, OpenSSL 3.5.5 27 Jan 2026 debug1: Reading configuration data /root/.ssh/config debug1: Reading configuration data /etc/ssh/ssh_config debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug2: checking match for 'final all' host 10.31.11.27 originally 10.31.11.27 debug2: match not found debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config debug1: configuration requests final Match pass debug2: resolve_canonicalize: hostname 10.31.11.27 is address debug1: re-parsing configuration debug1: Reading configuration data /root/.ssh/config debug1: Reading configuration data /etc/ssh/ssh_config debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug2: checking match for 'final all' host 10.31.11.27 originally 10.31.11.27 debug2: match found debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config debug1: auto-mux: Trying existing master at '/root/.ansible/cp/e1141b92e1' debug2: fd 3 setting O_NONBLOCK debug2: mux_client_hello_exchange: master version 4 debug1: mux_client_request_session: master session id: 2 debug2: Received exit status from master 0 Shared connection to 10.31.11.27 closed. 
TASK [Print script output] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15 Saturday 28 March 2026 19:25:49 -0400 (0:00:01.269) 0:03:24.646 ******** ok: [managed-node12] => {} MSG: {'name': 'foo', 'encrypted': False, 'key_desc': None, 'clevis_pin': None, 'clevis_args': {}} TASK [Get information about Stratis] ******************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:19 Saturday 28 March 2026 19:25:49 -0400 (0:00:00.313) 0:03:24.967 ******** ok: [managed-node12] => { "ansible_facts": { "_stratis_pool_info": { "clevis_args": {}, "clevis_pin": null, "encrypted": false, "key_desc": null, "name": "foo" } }, "changed": false } TASK [Verify that the pools was created] *************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:23 Saturday 28 March 2026 19:25:50 -0400 (0:00:00.309) 0:03:25.277 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Verify that encryption is correctly set] ********************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:30 Saturday 28 March 2026 19:25:50 -0400 (0:00:00.298) 0:03:25.575 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.encryption", "skip_reason": "Conditional result was False" } TASK [Verify that Clevis/Tang encryption is correctly set] ********************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:39 Saturday 28 March 2026 19:25:50 -0400 (0:00:00.124) 0:03:25.700 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.encryption", "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:49 Saturday 28 March 2026 19:25:50 -0400 (0:00:00.248) 0:03:25.949 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_stratis_report": null }, "changed": false } TASK [Clean up test variables] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:106 Saturday 28 March 2026 19:25:51 -0400 (0:00:00.328) 0:03:26.278 ******** ok: [managed-node12] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Verify the volumes] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3 Saturday 28 March 2026 19:25:51 -0400 (0:00:00.294) 0:03:26.573 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node12 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': 
'', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'part_type': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=38081a08-cf75-4f9b-b424-e901b7e9592f', '_kernel_device': '/dev/dm-6', '_raw_kernel_device': '/dev/dm-6'}) TASK [Set storage volume test variables] *************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2 Saturday 28 March 2026 19:25:51 -0400 (0:00:00.514) 0:03:27.087 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_volume_present": true, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [Run test verify for storage_test_volume_subset] ************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19 Saturday 28 March 2026 19:25:52 -0400 (0:00:00.340) 0:03:27.428 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node12 => (item=mount) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node12 => (item=fstab) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node12 => (item=fs) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node12 => (item=device) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node12 => (item=encryption) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node12 => (item=md) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node12 => (item=size) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node12 => (item=cache) TASK [Get expected mount device based on device type] ************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7 Saturday 28 March 2026 19:25:54 -0400 (0:00:02.201) 0:03:29.629 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_device_path": "/dev/stratis/foo/test1" }, "changed": false } TASK [Set some facts] ********************************************************** task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11 Saturday 28 March 2026 19:25:54 -0400 (0:00:00.392) 0:03:30.022 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_mount_expected_mount_point": "/opt/test1", "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Get information about the mountpoint directory] ************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19 Saturday 28 March 2026 19:25:55 -0400 (0:00:00.366) 0:03:30.388 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "(not storage_test_volume.mount_user is none and storage_test_volume.mount_user | length > 0) or (not storage_test_volume.mount_group is none and storage_test_volume.mount_group | length > 0) or (not storage_test_volume.mount_mode is none and storage_test_volume.mount_mode | length > 0)", "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by device] ******************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:32 Saturday 28 March 2026 19:25:55 -0400 (0:00:00.440) 0:03:30.828 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Verify mount directory user] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:40 Saturday 28 March 2026 19:25:56 -0400 (0:00:00.347) 0:03:31.175 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "not storage_test_volume.mount_user is none", "skip_reason": "Conditional result was False" } TASK [Verify mount directory group] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:51 Saturday 28 March 2026 19:25:56 -0400 (0:00:00.383) 0:03:31.559 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "not storage_test_volume.mount_group is none", "skip_reason": "Conditional result was False" } TASK [Verify mount directory permissions] ************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:62 Saturday 28 March 2026 19:25:56 -0400 (0:00:00.398) 0:03:31.958 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "not storage_test_volume.mount_mode is none", "skip_reason": "Conditional result was False" } TASK [Get path of test volume device] ****************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:76 Saturday 28 March 2026 19:25:57 -0400 (0:00:00.383) 0:03:32.341 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:82 Saturday 28 March 2026 19:25:57 -0400 (0:00:00.253) 0:03:32.607 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": 
"Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:88 Saturday 28 March 2026 19:25:57 -0400 (0:00:00.279) 0:03:32.887 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Unset facts] ************************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:98 Saturday 28 March 2026 19:25:58 -0400 (0:00:00.277) 0:03:33.164 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_found_mount_stat": null, "storage_test_mount_expected_mount_point": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2 Saturday 28 March 2026 19:25:58 -0400 (0:00:00.245) 0:03:33.410 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "1", "storage_test_fstab_expected_mount_options_matches": "1", "storage_test_fstab_expected_mount_point_matches": "1", "storage_test_fstab_id_matches": [ "UUID=38081a08-cf75-4f9b-b424-e901b7e9592f " ], "storage_test_fstab_mount_options_matches": [ " /opt/test1 xfs defaults " ], "storage_test_fstab_mount_point_matches": [ " /opt/test1 " ] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17 Saturday 28 March 2026 19:25:58 -0400 (0:00:00.650) 0:03:34.060 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Verify the fstab mount point] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24 Saturday 28 March 2026 19:25:59 -0400 (0:00:00.349) 0:03:34.409 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33 Saturday 28 March 2026 19:25:59 -0400 (0:00:00.306) 0:03:34.716 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "__storage_verify_mount_options | d(false)", "skip_reason": "Conditional result was False" } TASK [Verify fingerprint] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45 Saturday 28 March 2026 19:25:59 -0400 (0:00:00.286) 0:03:35.003 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Clean up variables] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52 Saturday 28 March 2026 19:26:00 -0400 (0:00:00.407) 0:03:35.410 ******** ok: [managed-node12] => { "ansible_facts": { 
"storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6 Saturday 28 March 2026 19:26:00 -0400 (0:00:00.290) 0:03:35.701 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type != \"stratis\"", "skip_reason": "Conditional result was False" } TASK [Verify fs label] ********************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14 Saturday 28 March 2026 19:26:00 -0400 (0:00:00.285) 0:03:35.986 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type != \"stratis\"", "skip_reason": "Conditional result was False" } TASK [See whether the device node is present] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3 Saturday 28 March 2026 19:26:01 -0400 (0:00:00.218) 0:03:36.205 ******** ok: [managed-node12] => { "changed": false, "stat": { "atime": 1774740232.0172572, "attr_flags": "", "attributes": [], "block_size": 512, "blocks": 0, "charset": "binary", "ctime": 1774740232.0172572, "dev": 6, "device_type": 64774, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 1496, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/symlink", "mode": "0660", "mtime": 1774740232.0172572, "nlink": 1, "path": "/dev/stratis/foo/test1", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9 Saturday 28 March 2026 19:26:02 -0400 (0:00:01.102) 0:03:37.307 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Verify the presence/absence of the device node - 2] ********************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16 Saturday 28 March 2026 19:26:02 -0400 (0:00:00.277) 0:03:37.584 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "not (_storage_test_volume_present or storage_test_volume.type == 'disk')", "skip_reason": "Conditional result was False" } TASK [Make sure we got info about this volume] ********************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23 Saturday 28 March 2026 19:26:02 -0400 (0:00:00.277) 0:03:37.861 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Process volume type (set initial value) (1/2)] *************************** task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29 Saturday 28 March 2026 19:26:03 -0400 (0:00:00.267) 0:03:38.129 ******** ok: [managed-node12] => { "ansible_facts": { "st_volume_type": "stratis" }, "changed": false } TASK [Process volume type (get RAID value) (2/2)] ****************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33 Saturday 28 March 2026 19:26:03 -0400 (0:00:00.279) 0:03:38.409 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == \"raid\"", "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38 Saturday 28 March 2026 19:26:03 -0400 (0:00:00.190) 0:03:38.600 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Stat the LUKS device, if encrypted] ************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3 Saturday 28 March 2026 19:26:03 -0400 (0:00:00.264) 0:03:38.864 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10 Saturday 28 March 2026 19:26:03 -0400 (0:00:00.230) 0:03:39.095 ******** ok: [managed-node12] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: cryptsetup TASK [Collect LUKS info for this volume] *************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16 Saturday 28 March 2026 19:26:05 -0400 (0:00:01.446) 0:03:40.542 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.encryption and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22 Saturday 28 March 2026 19:26:05 -0400 (0:00:00.245) 0:03:40.787 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29 Saturday 28 March 2026 19:26:05 -0400 (0:00:00.231) 0:03:41.019 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40 Saturday 28 March 2026 19:26:06 -0400 (0:00:00.305) 0:03:41.324 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present and 
storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46 Saturday 28 March 2026 19:26:06 -0400 (0:00:00.202) 0:03:41.527 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51 Saturday 28 March 2026 19:26:06 -0400 (0:00:00.240) 0:03:41.768 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:64 Saturday 28 March 2026 19:26:06 -0400 (0:00:00.231) 0:03:42.005 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:77 Saturday 28 March 2026 19:26:07 -0400 (0:00:00.245) 0:03:42.251 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Set test variables] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:90 Saturday 28 March 2026 19:26:07 -0400 (0:00:00.197) 0:03:42.448 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:96 Saturday 28 March 2026 19:26:07 -0400 (0:00:00.362) 0:03:42.811 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:103 Saturday 28 March 2026 19:26:08 -0400 (0:00:00.332) 0:03:43.144 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:111 Saturday 28 March 2026 19:26:08 -0400 (0:00:00.233) 0:03:43.377 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", 
"skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:119 Saturday 28 March 2026 19:26:08 -0400 (0:00:00.238) 0:03:43.619 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:127 Saturday 28 March 2026 19:26:08 -0400 (0:00:00.224) 0:03:43.844 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [Get information about RAID] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8 Saturday 28 March 2026 19:26:08 -0400 (0:00:00.238) 0:03:44.082 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14 Saturday 28 March 2026 19:26:09 -0400 (0:00:00.252) 0:03:44.335 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19 Saturday 28 March 2026 19:26:09 -0400 (0:00:00.263) 0:03:44.598 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24 Saturday 28 March 2026 19:26:09 -0400 (0:00:00.248) 0:03:44.846 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set chunk size regex] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29 Saturday 28 March 2026 19:26:09 -0400 (0:00:00.208) 0:03:45.055 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37 Saturday 28 March 2026 19:26:10 -0400 (0:00:00.217) 0:03:45.273 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional 
result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46 Saturday 28 March 2026 19:26:10 -0400 (0:00:00.205) 0:03:45.479 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54 Saturday 28 March 2026 19:26:10 -0400 (0:00:00.195) 0:03:45.675 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62 Saturday 28 March 2026 19:26:10 -0400 (0:00:00.192) 0:03:45.867 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70 Saturday 28 March 2026 19:26:10 -0400 (0:00:00.204) 0:03:46.071 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Parse the actual size of the volume] ************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3 Saturday 28 March 2026 19:26:11 -0400 (0:00:00.216) 0:03:46.287 ******** ok: [managed-node12] => { "bytes": 4294967296, "changed": false, "lvm": "4g", "parted": "4GiB", "size": "4 GiB" } TASK [Parse the requested size of the volume] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11 Saturday 28 March 2026 19:26:12 -0400 (0:00:01.016) 0:03:47.312 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == \"lvm\"", "skip_reason": "Conditional result was False" } TASK [Establish base value for expected size] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20 Saturday 28 March 2026 19:26:12 -0400 (0:00:00.212) 0:03:47.524 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == \"lvm\"", "skip_reason": "Conditional result was False" } TASK [Show expected size] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28 Saturday 28 March 2026 19:26:12 -0400 (0:00:00.226) 0:03:47.750 ******** ok: [managed-node12] => { "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined" } TASK [Get the size of parent/pool device] ************************************** task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32 Saturday 28 March 2026 19:26:12 -0400 (0:00:00.235) 0:03:47.986 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == \"lvm\"", "skip_reason": "Conditional result was False" } TASK [Show test pool] ********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46 Saturday 28 March 2026 19:26:13 -0400 (0:00:00.283) 0:03:48.270 ******** skipping: [managed-node12] => { "false_condition": "storage_test_volume.type == \"lvm\"" } TASK [Show test blockinfo] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50 Saturday 28 March 2026 19:26:13 -0400 (0:00:00.204) 0:03:48.474 ******** skipping: [managed-node12] => { "false_condition": "storage_test_volume.type == \"lvm\"" } TASK [Show test pool size] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54 Saturday 28 March 2026 19:26:13 -0400 (0:00:00.193) 0:03:48.667 ******** skipping: [managed-node12] => { "false_condition": "storage_test_volume.type == \"lvm\"" } TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58 Saturday 28 March 2026 19:26:13 -0400 (0:00:00.240) 0:03:48.908 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == \"lvm\"", "skip_reason": "Conditional result was False" } TASK [Default thin pool reserved space values] ********************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:68 Saturday 28 March 2026 19:26:14 -0400 (0:00:00.243) 0:03:49.151 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Default minimal thin pool reserved space size] *************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:72 Saturday 28 March 2026 19:26:14 -0400 (0:00:00.280) 0:03:49.432 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Default maximal thin pool reserved space size] *************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:77 Saturday 28 March 2026 19:26:14 -0400 (0:00:00.301) 0:03:49.734 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Calculate maximum usable space in thin pool] ***************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:83 Saturday 28 March 2026 19:26:14 -0400 (0:00:00.289) 0:03:50.023 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", 
"skip_reason": "Conditional result was False" } TASK [Apply upper size limit to max usable thin pool space] ******************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:87 Saturday 28 March 2026 19:26:15 -0400 (0:00:00.272) 0:03:50.296 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Apply lower size limit to max usable thin pool space] ******************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:92 Saturday 28 March 2026 19:26:15 -0400 (0:00:00.237) 0:03:50.536 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Convert maximum usable thin pool space from int to Size] ***************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:97 Saturday 28 March 2026 19:26:15 -0400 (0:00:00.226) 0:03:50.762 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Show max thin pool size] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:102 Saturday 28 March 2026 19:26:15 -0400 (0:00:00.229) 0:03:50.992 ******** skipping: [managed-node12] => { "false_condition": "storage_test_volume.thin | bool" } TASK [Show volume thin pool size] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:106 Saturday 28 March 2026 19:26:16 -0400 (0:00:00.354) 0:03:51.347 ******** skipping: [managed-node12] => { "false_condition": "storage_test_volume.thin | bool" } TASK [Show test volume size] *************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:110 Saturday 28 March 2026 19:26:16 -0400 (0:00:00.240) 0:03:51.587 ******** skipping: [managed-node12] => { "false_condition": "storage_test_volume.thin | bool" } TASK [Establish base value for expected thin pool size] ************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:114 Saturday 28 March 2026 19:26:16 -0400 (0:00:00.261) 0:03:51.849 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Calculate the expected size based on pool size and percentage value - 2] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:121 Saturday 28 March 2026 19:26:16 -0400 (0:00:00.228) 0:03:52.077 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Establish base value for expected thin pool volume size] ***************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:128 Saturday 28 March 2026 19:26:17 -0400 (0:00:00.234) 
0:03:52.311 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Calculate the expected thin pool volume size based on percentage value] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:132 Saturday 28 March 2026 19:26:17 -0400 (0:00:00.243) 0:03:52.555 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Replace expected volume size with calculated value] ********************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:138 Saturday 28 March 2026 19:26:17 -0400 (0:00:00.216) 0:03:52.771 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Show actual size] ******************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:144 Saturday 28 March 2026 19:26:17 -0400 (0:00:00.249) 0:03:53.020 ******** ok: [managed-node12] => { "storage_test_actual_size": { "bytes": 4294967296, "changed": false, "failed": false, "lvm": "4g", "parted": "4GiB", "size": "4 GiB" } } TASK [Show expected size - 2] ************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:148 Saturday 28 March 2026 19:26:18 -0400 (0:00:00.199) 0:03:53.220 ******** ok: [managed-node12] => { "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined" } TASK [Assert expected size is actual size] ************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:152 Saturday 28 March 2026 19:26:18 -0400 (0:00:00.238) 0:03:53.465 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == \"lvm\"", "skip_reason": "Conditional result was False" } TASK [Get information about the LV] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5 Saturday 28 March 2026 19:26:18 -0400 (0:00:00.197) 0:03:53.662 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13 Saturday 28 March 2026 19:26:18 -0400 (0:00:00.201) 0:03:53.864 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Check segment type] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17 Saturday 28 March 2026 19:26:18 -0400 (0:00:00.203) 0:03:54.068 ******** skipping: [managed-node12] => { 
"changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Set LV cache size] ******************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24 Saturday 28 March 2026 19:26:19 -0400 (0:00:00.200) 0:03:54.268 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Parse the requested cache size] ****************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31 Saturday 28 March 2026 19:26:19 -0400 (0:00:00.198) 0:03:54.466 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Set expected cache size] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37 Saturday 28 March 2026 19:26:19 -0400 (0:00:00.217) 0:03:54.684 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42 Saturday 28 March 2026 19:26:19 -0400 (0:00:00.231) 0:03:54.915 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25 Saturday 28 March 2026 19:26:20 -0400 (0:00:00.210) 0:03:55.125 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:43 Saturday 28 March 2026 19:26:20 -0400 (0:00:00.165) 0:03:55.290 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Clean up variable namespace] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:52 Saturday 28 March 2026 19:26:20 -0400 (0:00:00.157) 0:03:55.448 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Add second filesystem to the pool] *************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:90 Saturday 28 March 2026 19:26:20 -0400 (0:00:00.133) 0:03:55.587 ******** included: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml for managed-node12 TASK [Clear facts] ************************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml:10 Saturday 28 March 2026 19:26:20 -0400 (0:00:00.387) 0:03:55.974 ******** META: facts cleared TASK [Run the role] ************************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml:24 Saturday 28 March 2026 19:26:20 -0400 (0:00:00.066) 0:03:56.041 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "__sr_failed_when is defined", "skip_reason": "Conditional result was False" } TASK [Run the role normally] *************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml:34 Saturday 28 March 2026 19:26:21 -0400 (0:00:00.195) 0:03:56.236 ******** included: fedora.linux_system_roles.storage for managed-node12 TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Saturday 28 March 2026 19:26:21 -0400 (0:00:00.264) 0:03:56.501 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node12 TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Saturday 28 March 2026 19:26:21 -0400 (0:00:00.232) 0:03:56.734 ******** ok: [managed-node12] TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Saturday 28 March 2026 19:26:23 -0400 (0:00:01.774) 0:03:58.508 ******** skipping: [managed-node12] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node12] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node12] => (item=CentOS_10.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-fs", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "xfsprogs", "stratisd", "stratis-cli", "{{ 'libblockdev-s390' if ansible_facts['architecture'] == 's390x' else 'libblockdev' }}", "vdo" ] }, "ansible_included_var_files": [ "/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node12] => (item=CentOS_10.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-fs", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "xfsprogs", "stratisd", "stratis-cli", "{{ 'libblockdev-s390' if ansible_facts['architecture'] == 's390x' else 'libblockdev' }}", "vdo" ] 
}, "ansible_included_var_files": [ "/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Saturday 28 March 2026 19:26:23 -0400 (0:00:00.485) 0:03:58.994 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "not __storage_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Saturday 28 March 2026 19:26:24 -0400 (0:00:00.232) 0:03:59.226 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "not __storage_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Saturday 28 March 2026 19:26:24 -0400 (0:00:00.228) 0:03:59.455 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Saturday 28 March 2026 19:26:24 -0400 (0:00:00.187) 0:03:59.642 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Saturday 28 March 2026 19:26:24 -0400 (0:00:00.196) 0:03:59.839 ******** redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node12 TASK [fedora.linux_system_roles.storage : Add repo key] ************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Saturday 28 March 2026 19:26:25 -0400 (0:00:00.464) 0:04:00.304 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_blivet_custom_repo.key is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Add blivet repo] ********************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:15 Saturday 28 March 2026 19:26:25 -0400 (0:00:00.170) 0:04:00.475 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_blivet_custom_repo.key is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:20 Saturday 28 March 2026 19:26:25 -0400 (0:00:00.198) 0:04:00.673 ******** skipping: [managed-node12] => { "changed": false, "false_condition": 
"storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:27 Saturday 28 March 2026 19:26:25 -0400 (0:00:00.212) 0:04:00.885 ******** ok: [managed-node12] => { "storage_pools | d([])": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "name": "foo", "type": "stratis", "volumes": [ { "mount_point": "/opt/test1", "name": "test1", "size": "4g" }, { "mount_point": "/opt/test2", "name": "test2", "size": "4g" } ] } ] } TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:32 Saturday 28 March 2026 19:26:26 -0400 (0:00:00.259) 0:04:01.145 ******** ok: [managed-node12] => { "storage_volumes | d([])": [] } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37 Saturday 28 March 2026 19:26:26 -0400 (0:00:00.204) 0:04:01.349 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:50 Saturday 28 March 2026 19:26:26 -0400 (0:00:00.178) 0:04:01.527 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:56 Saturday 28 March 2026 19:26:26 -0400 (0:00:00.172) 0:04:01.700 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:76 Saturday 28 March 2026 19:26:26 -0400 (0:00:00.210) 0:04:01.911 ******** ok: [managed-node12] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "apt-daily.service": { "name": "apt-daily.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": 
"systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": 
"systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fcoe.service": { "name": "fcoe.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": 
"systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "iscsi-shutdown.service": { "name": "iscsi-shutdown.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsi.service": { "name": "iscsi.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsid.service": { "name": "iscsid.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-activation-early.service": { "name": "lvm2-activation-early.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_multipath.service": { "name": "modprobe@dm_multipath.service", "source": "systemd", "state": 
"stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "raid-check.service": { "name": "raid-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "rbdmap.service": { "name": "rbdmap.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": 
"rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-unix-local@.service": { "name": "sshd-unix-local@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd-vsock@.service": { "name": "sshd-vsock@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" 
}, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "stratis-fstab-setup-with-network@.service": { "name": "stratis-fstab-setup-with-network@.service", "source": "systemd", "state": "unknown", "status": "static" }, "stratis-fstab-setup@.service": { "name": "stratis-fstab-setup@.service", "source": "systemd", "state": "unknown", "status": "static" }, "stratisd-min-postinitrd.service": { "name": "stratisd-min-postinitrd.service", "source": "systemd", "state": "inactive", "status": "static" }, "stratisd.service": { "name": "stratisd.service", "source": "systemd", "state": "running", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", 
"state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": 
"systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, 
"systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-userdbd.service": { "name": "systemd-userdbd.service", "source": "systemd", "state": "running", "status": "indirect" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": 
"active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:82 Saturday 28 March 2026 19:26:29 -0400 (0:00:02.670) 0:04:04.581 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:88 Saturday 28 March 2026 19:26:29 -0400 (0:00:00.248) 0:04:04.829 ******** changed: [managed-node12] => { "actions": [ { "action": "create device", "device": "/dev/stratis/foo/test2", "fs_type": null }, { "action": "create format", "device": "/dev/stratis/foo/test2", "fs_type": "stratis xfs" } ], "changed": true, "crypts": [], "leaves": [ "/dev/stratis/foo/test1", "/dev/xvda1", "/dev/xvda2", "/dev/stratis/foo/test2" ], "mounts": [ { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "UUID=38081a08-cf75-4f9b-b424-e901b7e9592f", "state": "mounted" }, { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test2", "src": "UUID=e520ef01-29b1-479d-a588-3bb5ac0d55d9", "state": "mounted" } ], "packages": [ "stratis-cli", "stratisd", "xfsprogs" ], "pools": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "stratis", "volumes": [ { "_device": "/dev/stratis/foo/test1", "_kernel_device": "/dev/dm-6", "_mount_id": "UUID=38081a08-cf75-4f9b-b424-e901b7e9592f", "_raw_device": "/dev/stratis/foo/test1", "_raw_kernel_device": "/dev/dm-6", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": 
null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null }, { "_device": "/dev/stratis/foo/test2", "_kernel_device": "/dev/dm-7", "_mount_id": "UUID=e520ef01-29b1-479d-a588-3bb5ac0d55d9", "_raw_device": "/dev/stratis/foo/test2", "_raw_kernel_device": "/dev/dm-7", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "mount_user": null, "name": "test2", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:103 Saturday 28 March 2026 19:26:48 -0400 (0:00:19.162) 0:04:23.992 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_udevadm_trigger | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ****** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:110 Saturday 28 March 2026 19:26:49 -0400 (0:00:00.108) 0:04:24.101 ******** ok: [managed-node12] => { "changed": false, "stat": { "atime": 1774740241.5293837, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "b4ba8a954ffa41e5183e741617ef38130014deca", "ctime": 1774740241.5267115, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 230686985, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1774740241.5267115, "nlink": 1, "path": "/etc/fstab", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1478, "uid": 0, "version": "1679064280", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:115 Saturday 28 March 2026 19:26:49 -0400 (0:00:00.815) 0:04:24.917 ******** ok: [managed-node12] => { "backup": "", "changed": false } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:133 Saturday 28 March 2026 19:26:50 -0400 (0:00:00.891) 0:04:25.808 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : 
Show blivet_output] ****************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:139 Saturday 28 March 2026 19:26:51 -0400 (0:00:00.317) 0:04:26.126 ******** ok: [managed-node12] => { "blivet_output": { "actions": [ { "action": "create device", "device": "/dev/stratis/foo/test2", "fs_type": null }, { "action": "create format", "device": "/dev/stratis/foo/test2", "fs_type": "stratis xfs" } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/stratis/foo/test1", "/dev/xvda1", "/dev/xvda2", "/dev/stratis/foo/test2" ], "mounts": [ { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "UUID=38081a08-cf75-4f9b-b424-e901b7e9592f", "state": "mounted" }, { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test2", "src": "UUID=e520ef01-29b1-479d-a588-3bb5ac0d55d9", "state": "mounted" } ], "packages": [ "stratis-cli", "stratisd", "xfsprogs" ], "pools": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "stratis", "volumes": [ { "_device": "/dev/stratis/foo/test1", "_kernel_device": "/dev/dm-6", "_mount_id": "UUID=38081a08-cf75-4f9b-b424-e901b7e9592f", "_raw_device": "/dev/stratis/foo/test1", "_raw_kernel_device": "/dev/dm-6", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null }, { "_device": "/dev/stratis/foo/test2", "_kernel_device": "/dev/dm-7", "_mount_id": "UUID=e520ef01-29b1-479d-a588-3bb5ac0d55d9", "_raw_device": "/dev/stratis/foo/test2", "_raw_kernel_device": "/dev/dm-7", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, 
"mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "mount_user": null, "name": "test2", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148 Saturday 28 March 2026 19:26:51 -0400 (0:00:00.225) 0:04:26.351 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "stratis", "volumes": [ { "_device": "/dev/stratis/foo/test1", "_kernel_device": "/dev/dm-6", "_mount_id": "UUID=38081a08-cf75-4f9b-b424-e901b7e9592f", "_raw_device": "/dev/stratis/foo/test1", "_raw_kernel_device": "/dev/dm-6", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null }, { "_device": "/dev/stratis/foo/test2", "_kernel_device": "/dev/dm-7", "_mount_id": "UUID=e520ef01-29b1-479d-a588-3bb5ac0d55d9", "_raw_device": "/dev/stratis/foo/test2", "_raw_kernel_device": "/dev/dm-7", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "mount_user": null, "name": "test2", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, 
"raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:152 Saturday 28 March 2026 19:26:51 -0400 (0:00:00.227) 0:04:26.578 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] ************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:168 Saturday 28 March 2026 19:26:51 -0400 (0:00:00.108) 0:04:26.687 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:179 Saturday 28 March 2026 19:26:51 -0400 (0:00:00.122) 0:04:26.810 ******** ok: [managed-node12] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Set up new/current mounts] *********** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:184 Saturday 28 March 2026 19:26:53 -0400 (0:00:01.518) 0:04:28.329 ******** redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount ok: [managed-node12] => (item={'src': 'UUID=38081a08-cf75-4f9b-b424-e901b7e9592f', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => { "ansible_loop_var": "mount_info", "backup_file": "", "boot": "yes", "changed": false, "dump": "0", "fstab": "/etc/fstab", "fstype": "xfs", "mount_info": { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "UUID=38081a08-cf75-4f9b-b424-e901b7e9592f", "state": "mounted" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "UUID=38081a08-cf75-4f9b-b424-e901b7e9592f" } redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount changed: [managed-node12] => (item={'src': 'UUID=e520ef01-29b1-479d-a588-3bb5ac0d55d9', 'path': '/opt/test2', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => { "ansible_loop_var": "mount_info", "backup_file": "", "boot": "yes", "changed": true, "dump": "0", "fstab": "/etc/fstab", "fstype": "xfs", "mount_info": { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test2", "src": "UUID=e520ef01-29b1-479d-a588-3bb5ac0d55d9", "state": "mounted" }, "name": "/opt/test2", "opts": "defaults", "passno": "0", "src": "UUID=e520ef01-29b1-479d-a588-3bb5ac0d55d9" } TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195 Saturday 28 March 2026 19:26:54 -0400 (0:00:01.474) 0:04:29.803 ******** skipping: 
[managed-node12] => (item={'src': 'UUID=38081a08-cf75-4f9b-b424-e901b7e9592f', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => { "ansible_loop_var": "mount_info", "changed": false, "false_condition": "mount_info['owner'] != none or mount_info['group'] != none or mount_info['mode'] != none", "mount_info": { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "UUID=38081a08-cf75-4f9b-b424-e901b7e9592f", "state": "mounted" }, "skip_reason": "Conditional result was False" } skipping: [managed-node12] => (item={'src': 'UUID=e520ef01-29b1-479d-a588-3bb5ac0d55d9', 'path': '/opt/test2', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => { "ansible_loop_var": "mount_info", "changed": false, "false_condition": "mount_info['owner'] != none or mount_info['group'] != none or mount_info['mode'] != none", "mount_info": { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test2", "src": "UUID=e520ef01-29b1-479d-a588-3bb5ac0d55d9", "state": "mounted" }, "skip_reason": "Conditional result was False" } skipping: [managed-node12] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:207 Saturday 28 March 2026 19:26:55 -0400 (0:00:00.310) 0:04:30.114 ******** ok: [managed-node12] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:215 Saturday 28 March 2026 19:26:56 -0400 (0:00:01.199) 0:04:31.313 ******** ok: [managed-node12] => { "changed": false, "stat": { "atime": 1774737762.6954868, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1774342704.475, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 4194435, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1774342323.629, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "1224473831", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:220 Saturday 28 March 2026 19:26:56 -0400 (0:00:00.700) 0:04:32.014 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:242 Saturday 28 March 2026 19:26:57 -0400 (0:00:00.101) 0:04:32.115 
******** ok: [managed-node12] TASK [Verify role results - 3] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:105 Saturday 28 March 2026 19:26:58 -0400 (0:00:01.484) 0:04:33.601 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node12 TASK [Print out pool information] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2 Saturday 28 March 2026 19:26:58 -0400 (0:00:00.267) 0:04:33.869 ******** ok: [managed-node12] => { "_storage_pools_list": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "stratis", "volumes": [ { "_device": "/dev/stratis/foo/test1", "_kernel_device": "/dev/dm-6", "_mount_id": "UUID=38081a08-cf75-4f9b-b424-e901b7e9592f", "_raw_device": "/dev/stratis/foo/test1", "_raw_kernel_device": "/dev/dm-6", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null }, { "_device": "/dev/stratis/foo/test2", "_kernel_device": "/dev/dm-7", "_mount_id": "UUID=e520ef01-29b1-479d-a588-3bb5ac0d55d9", "_raw_device": "/dev/stratis/foo/test2", "_raw_kernel_device": "/dev/dm-7", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "mount_user": null, "name": "test2", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": 
false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } ] } ] } TASK [Print out volume information] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7 Saturday 28 March 2026 19:26:58 -0400 (0:00:00.165) 0:04:34.034 ******** skipping: [managed-node12] => { "false_condition": "_storage_volumes_list | length > 0" } TASK [Collect info about the volumes.] ***************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15 Saturday 28 March 2026 19:26:59 -0400 (0:00:00.085) 0:04:34.119 ******** ok: [managed-node12] => { "changed": false, "info": { "/dev/loop0": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/loop0", "size": "0B", "type": "loop", "uuid": "" }, "/dev/mapper/stratis-1-private-98c86bd3ff4a4ace81fea7a7d8071f6f-flex-mdv": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/mapper/stratis-1-private-98c86bd3ff4a4ace81fea7a7d8071f6f-flex-mdv", "size": "512M", "type": "stratis", "uuid": "" }, "/dev/mapper/stratis-1-private-98c86bd3ff4a4ace81fea7a7d8071f6f-flex-thindata": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/mapper/stratis-1-private-98c86bd3ff4a4ace81fea7a7d8071f6f-flex-thindata", "size": "50G", "type": "stratis", "uuid": "" }, "/dev/mapper/stratis-1-private-98c86bd3ff4a4ace81fea7a7d8071f6f-flex-thinmeta": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/mapper/stratis-1-private-98c86bd3ff4a4ace81fea7a7d8071f6f-flex-thinmeta", "size": "786M", "type": "stratis", "uuid": "" }, "/dev/mapper/stratis-1-private-98c86bd3ff4a4ace81fea7a7d8071f6f-physical-cache": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/mapper/stratis-1-private-98c86bd3ff4a4ace81fea7a7d8071f6f-physical-cache", "size": "52.1G", "type": "stratis", "uuid": "" }, "/dev/mapper/stratis-1-private-98c86bd3ff4a4ace81fea7a7d8071f6f-physical-originsub": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/mapper/stratis-1-private-98c86bd3ff4a4ace81fea7a7d8071f6f-physical-originsub", "size": "52.1G", "type": "stratis", "uuid": "" }, "/dev/mapper/stratis-1-private-98c86bd3ff4a4ace81fea7a7d8071f6f-thinpool-pool": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/mapper/stratis-1-private-98c86bd3ff4a4ace81fea7a7d8071f6f-thinpool-pool", "size": "50G", "type": "stratis", "uuid": "" }, "/dev/sda": { "fstype": "stratis", "label": "", "mountpoint": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "26de8ef7-1afd-484c-8227-390830b9f791" }, "/dev/sdb": { "fstype": "stratis", "label": "", "mountpoint": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "c1b3a6f2-c13b-473a-8a6e-3ef837ed2f6e" }, "/dev/sdc": { "fstype": "stratis", "label": "", "mountpoint": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "41fb917c-951e-41b2-8c83-03a53bb3c710" }, "/dev/sdd": { "fstype": "stratis", "label": "", "mountpoint": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "926c9bd5-a194-4984-9b26-e2a194676c27" }, "/dev/sde": { "fstype": "stratis", "label": "", "mountpoint": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "755bef06-2819-4fe2-8faf-f6eee9e80010" }, "/dev/sdf": { "fstype": "stratis", "label": "", "mountpoint": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "9489957b-dcba-4642-bfad-4733daeb8844" }, "/dev/sdg": { "fstype": "stratis", "label": 
"", "mountpoint": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "9795169f-078b-4914-a09d-19e69dc3300e" }, "/dev/sdh": { "fstype": "stratis", "label": "", "mountpoint": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "b094d332-a66e-4a40-ab09-7ae241f2aeff" }, "/dev/sdi": { "fstype": "stratis", "label": "", "mountpoint": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "46644e71-1703-4adf-beee-bda271fc3b06" }, "/dev/stratis/foo/test1": { "fstype": "xfs", "label": "", "mountpoint": "/opt/test1", "name": "/dev/stratis/foo/test1", "size": "4G", "type": "stratis", "uuid": "38081a08-cf75-4f9b-b424-e901b7e9592f" }, "/dev/stratis/foo/test2": { "fstype": "xfs", "label": "", "mountpoint": "/opt/test2", "name": "/dev/stratis/foo/test2", "size": "4G", "type": "stratis", "uuid": "e520ef01-29b1-479d-a588-3bb5ac0d55d9" }, "/dev/xvda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda1", "size": "1M", "type": "partition", "uuid": "" }, "/dev/xvda2": { "fstype": "xfs", "label": "", "mountpoint": "/", "name": "/dev/xvda2", "size": "250G", "type": "partition", "uuid": "94fc577e-f1df-44bb-8e86-63b9a68f8f7f" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20 Saturday 28 March 2026 19:26:59 -0400 (0:00:00.732) 0:04:34.851 ******** ok: [managed-node12] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.003517", "end": "2026-03-28 19:27:00.385932", "rc": 0, "start": "2026-03-28 19:27:00.382415" } STDOUT: # system_role:storage # # /etc/fstab # Created by anaconda on Tue Mar 24 08:52:03 2026 # # Accessible filesystems, by reference, are maintained under '/dev/disk/'. # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info. # # After editing this file, run 'systemctl daemon-reload' to update systemd # units generated from this file. 
# UUID=94fc577e-f1df-44bb-8e86-63b9a68f8f7f / xfs defaults 0 0 ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 UUID=38081a08-cf75-4f9b-b424-e901b7e9592f /opt/test1 xfs defaults 0 0 UUID=e520ef01-29b1-479d-a588-3bb5ac0d55d9 /opt/test2 xfs defaults 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25 Saturday 28 March 2026 19:27:00 -0400 (0:00:00.766) 0:04:35.618 ******** ok: [managed-node12] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.002982", "end": "2026-03-28 19:27:01.116037", "failed_when_result": false, "rc": 0, "start": "2026-03-28 19:27:01.113055" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34 Saturday 28 March 2026 19:27:01 -0400 (0:00:00.722) 0:04:36.341 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node12 => (item={'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'present', 'type': 'stratis', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'part_type': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 
'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=38081a08-cf75-4f9b-b424-e901b7e9592f', '_kernel_device': '/dev/dm-6', '_raw_kernel_device': '/dev/dm-6'}, {'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test2', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'part_type': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test2', '_raw_device': '/dev/stratis/foo/test2', '_mount_id': 'UUID=e520ef01-29b1-479d-a588-3bb5ac0d55d9', '_kernel_device': '/dev/dm-7', '_raw_kernel_device': '/dev/dm-7'}]}) TASK [Set _storage_pool_tests] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5 Saturday 28 March 2026 19:27:01 -0400 (0:00:00.321) 0:04:36.662 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [Get VG shared value status] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18 Saturday 28 March 2026 19:27:01 -0400 (0:00:00.120) 0:04:36.783 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'", "skip_reason": "Conditional result was False" } TASK [Verify that VG shared value checks out] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24 Saturday 28 March 2026 19:27:01 -0400 (0:00:00.151) 0:04:36.935 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'", "skip_reason": "Conditional result was False" } TASK [Verify pool subset] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34 Saturday 28 March 2026 19:27:01 -0400 (0:00:00.151) 0:04:37.086 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node12 => (item=members) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node12 => (item=volumes) TASK [Set test variables] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2 Saturday 28 March 2026 19:27:02 -0400 
(0:00:00.214) 0:04:37.301 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Get the canonical device path for each member device] ******************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8 Saturday 28 March 2026 19:27:02 -0400 (0:00:00.055) 0:04:37.356 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Set pvs lvm length] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17 Saturday 28 March 2026 19:27:02 -0400 (0:00:00.068) 0:04:37.424 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Set pool pvs] ************************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22 Saturday 28 March 2026 19:27:02 -0400 (0:00:00.165) 0:04:37.590 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Verify PV count] ********************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27 Saturday 28 March 2026 19:27:02 -0400 (0:00:00.145) 0:04:37.735 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Set expected pv type] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36 Saturday 28 March 2026 19:27:02 -0400 (0:00:00.134) 0:04:37.869 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Set expected pv type - 2] ************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41 Saturday 28 March 2026 19:27:02 -0400 (0:00:00.121) 0:04:37.991 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm' and not storage_test_pool.encryption", "skip_reason": "Conditional result was False" } TASK [Set expected pv type - 3] ************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46 Saturday 28 March 2026 19:27:02 -0400 (0:00:00.103) 0:04:38.094 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:55 Saturday 28 March 2026 19:27:03 -0400 (0:00:00.121) 0:04:38.216 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK 
[Check that blivet supports PV grow to fill] ****************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:68 Saturday 28 March 2026 19:27:03 -0400 (0:00:00.073) 0:04:38.290 ******** ok: [managed-node12] => { "changed": false, "failed_when_result": false, "rc": 0 } STDERR: OpenSSH_9.9p1, OpenSSL 3.5.5 27 Jan 2026 debug1: Reading configuration data /root/.ssh/config debug1: Reading configuration data /etc/ssh/ssh_config debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug2: checking match for 'final all' host 10.31.11.27 originally 10.31.11.27 debug2: match not found debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config debug1: configuration requests final Match pass debug2: resolve_canonicalize: hostname 10.31.11.27 is address debug1: re-parsing configuration debug1: Reading configuration data /root/.ssh/config debug1: Reading configuration data /etc/ssh/ssh_config debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug2: checking match for 'final all' host 10.31.11.27 originally 10.31.11.27 debug2: match found debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config debug1: auto-mux: Trying existing master at '/root/.ansible/cp/e1141b92e1' debug2: fd 3 setting O_NONBLOCK debug2: mux_client_hello_exchange: master version 4 debug1: mux_client_request_session: master session id: 2 debug2: Received exit status from master 0 Shared connection to 10.31.11.27 closed. TASK [Verify that PVs fill the whole devices when they should] ***************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:78 Saturday 28 March 2026 19:27:03 -0400 (0:00:00.707) 0:04:38.997 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Check MD RAID] *********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:88 Saturday 28 March 2026 19:27:04 -0400 (0:00:00.114) 0:04:39.112 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node12 TASK [Get information about RAID] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8 Saturday 28 March 2026 19:27:04 -0400 (0:00:00.302) 0:04:39.415 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14 Saturday 28 March 2026 19:27:04 -0400 (0:00:00.078) 0:04:39.493 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19 Saturday 28 March 2026 19:27:04 -0400 (0:00:00.116) 0:04:39.610 ******** skipping: [managed-node12] => { "changed": false, "false_condition": 
"storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24 Saturday 28 March 2026 19:27:04 -0400 (0:00:00.188) 0:04:39.799 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set md chunk size regex] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29 Saturday 28 March 2026 19:27:04 -0400 (0:00:00.147) 0:04:39.946 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37 Saturday 28 March 2026 19:27:04 -0400 (0:00:00.131) 0:04:40.078 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46 Saturday 28 March 2026 19:27:05 -0400 (0:00:00.144) 0:04:40.223 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55 Saturday 28 March 2026 19:27:05 -0400 (0:00:00.106) 0:04:40.329 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64 Saturday 28 March 2026 19:27:05 -0400 (0:00:00.095) 0:04:40.425 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74 Saturday 28 March 2026 19:27:05 -0400 (0:00:00.140) 0:04:40.566 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83 Saturday 28 March 2026 19:27:05 -0400 (0:00:00.162) 0:04:40.728 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_chunk_size_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, 
"changed": false } TASK [Check LVM RAID] ********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:91 Saturday 28 March 2026 19:27:05 -0400 (0:00:00.157) 0:04:40.885 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node12 TASK [Validate pool member LVM RAID settings] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2 Saturday 28 March 2026 19:27:06 -0400 (0:00:00.276) 0:04:41.166 ******** skipping: [managed-node12] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'part_type': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=38081a08-cf75-4f9b-b424-e901b7e9592f', '_kernel_device': '/dev/dm-6', '_raw_kernel_device': '/dev/dm-6'}) => { "ansible_loop_var": "storage_test_lvmraid_volume", "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False", "storage_test_lvmraid_volume": { "_device": "/dev/stratis/foo/test1", "_kernel_device": "/dev/dm-6", "_mount_id": "UUID=38081a08-cf75-4f9b-b424-e901b7e9592f", "_raw_device": "/dev/stratis/foo/test1", "_raw_kernel_device": "/dev/dm-6", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } } skipping: [managed-node12] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 
'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test2', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'part_type': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test2', '_raw_device': '/dev/stratis/foo/test2', '_mount_id': 'UUID=e520ef01-29b1-479d-a588-3bb5ac0d55d9', '_kernel_device': '/dev/dm-7', '_raw_kernel_device': '/dev/dm-7'}) => { "ansible_loop_var": "storage_test_lvmraid_volume", "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False", "storage_test_lvmraid_volume": { "_device": "/dev/stratis/foo/test2", "_kernel_device": "/dev/dm-7", "_mount_id": "UUID=e520ef01-29b1-479d-a588-3bb5ac0d55d9", "_raw_device": "/dev/stratis/foo/test2", "_raw_kernel_device": "/dev/dm-7", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "mount_user": null, "name": "test2", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } } skipping: [managed-node12] => { "changed": false } MSG: All items skipped TASK [Check Thin Pools] ******************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:94 Saturday 28 March 2026 19:27:06 -0400 (0:00:00.192) 0:04:41.358 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node12 TASK [Validate pool member thinpool settings] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2 Saturday 28 March 2026 19:27:06 -0400 (0:00:00.212) 0:04:41.571 ******** skipping: [managed-node12] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 
'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'part_type': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=38081a08-cf75-4f9b-b424-e901b7e9592f', '_kernel_device': '/dev/dm-6', '_raw_kernel_device': '/dev/dm-6'}) => { "ansible_loop_var": "storage_test_thin_volume", "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False", "storage_test_thin_volume": { "_device": "/dev/stratis/foo/test1", "_kernel_device": "/dev/dm-6", "_mount_id": "UUID=38081a08-cf75-4f9b-b424-e901b7e9592f", "_raw_device": "/dev/stratis/foo/test1", "_raw_kernel_device": "/dev/dm-6", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } } skipping: [managed-node12] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test2', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'part_type': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test2', '_raw_device': '/dev/stratis/foo/test2', '_mount_id': 'UUID=e520ef01-29b1-479d-a588-3bb5ac0d55d9', '_kernel_device': '/dev/dm-7', '_raw_kernel_device': '/dev/dm-7'}) => { "ansible_loop_var": "storage_test_thin_volume", "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False", "storage_test_thin_volume": { "_device": "/dev/stratis/foo/test2", "_kernel_device": 
"/dev/dm-7", "_mount_id": "UUID=e520ef01-29b1-479d-a588-3bb5ac0d55d9", "_raw_device": "/dev/stratis/foo/test2", "_raw_kernel_device": "/dev/dm-7", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "mount_user": null, "name": "test2", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } } skipping: [managed-node12] => { "changed": false } MSG: All items skipped TASK [Check member encryption] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:97 Saturday 28 March 2026 19:27:06 -0400 (0:00:00.176) 0:04:41.748 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node12 TASK [Set test variables] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5 Saturday 28 March 2026 19:27:07 -0400 (0:00:00.409) 0:04:42.158 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10 Saturday 28 March 2026 19:27:07 -0400 (0:00:00.195) 0:04:42.353 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Validate pool member crypttab entries] *********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17 Saturday 28 March 2026 19:27:07 -0400 (0:00:00.113) 0:04:42.466 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24 Saturday 28 March 2026 19:27:07 -0400 (0:00:00.094) 0:04:42.561 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:100 Saturday 28 March 2026 19:27:07 -0400 (0:00:00.153) 0:04:42.714 ******** included: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node12 TASK [Validate pool member VDO settings] *************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2 Saturday 28 March 2026 19:27:07 -0400 (0:00:00.314) 0:04:43.029 ******** skipping: [managed-node12] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'part_type': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=38081a08-cf75-4f9b-b424-e901b7e9592f', '_kernel_device': '/dev/dm-6', '_raw_kernel_device': '/dev/dm-6'}) => { "ansible_loop_var": "storage_test_vdo_volume", "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False", "storage_test_vdo_volume": { "_device": "/dev/stratis/foo/test1", "_kernel_device": "/dev/dm-6", "_mount_id": "UUID=38081a08-cf75-4f9b-b424-e901b7e9592f", "_raw_device": "/dev/stratis/foo/test1", "_raw_kernel_device": "/dev/dm-6", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } } skipping: [managed-node12] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test2', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 
'cache_size': 0, 'compression': None, 'deduplication': None, 'part_type': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test2', '_raw_device': '/dev/stratis/foo/test2', '_mount_id': 'UUID=e520ef01-29b1-479d-a588-3bb5ac0d55d9', '_kernel_device': '/dev/dm-7', '_raw_kernel_device': '/dev/dm-7'}) => { "ansible_loop_var": "storage_test_vdo_volume", "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False", "storage_test_vdo_volume": { "_device": "/dev/stratis/foo/test2", "_kernel_device": "/dev/dm-7", "_mount_id": "UUID=e520ef01-29b1-479d-a588-3bb5ac0d55d9", "_raw_device": "/dev/stratis/foo/test2", "_raw_kernel_device": "/dev/dm-7", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "mount_user": null, "name": "test2", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } } skipping: [managed-node12] => { "changed": false } MSG: All items skipped TASK [Check Stratis] *********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:103 Saturday 28 March 2026 19:27:08 -0400 (0:00:00.209) 0:04:43.239 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node12 TASK [Get stratis pool information] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6 Saturday 28 March 2026 19:27:08 -0400 (0:00:00.364) 0:04:43.604 ******** ok: [managed-node12] => { "changed": false, "rc": 0 } STDOUT: {"name": "foo", "encrypted": false, "key_desc": null, "clevis_pin": null, "clevis_args": {}} STDERR: OpenSSH_9.9p1, OpenSSL 3.5.5 27 Jan 2026 debug1: Reading configuration data /root/.ssh/config debug1: Reading configuration data /etc/ssh/ssh_config debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug2: checking match for 'final all' host 10.31.11.27 originally 10.31.11.27 debug2: match not found debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config debug1: configuration requests final Match pass debug2: resolve_canonicalize: hostname 10.31.11.27 is address debug1: re-parsing configuration debug1: Reading configuration data /root/.ssh/config debug1: Reading configuration data /etc/ssh/ssh_config debug1: 
Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug2: checking match for 'final all' host 10.31.11.27 originally 10.31.11.27 debug2: match found debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config debug1: auto-mux: Trying existing master at '/root/.ansible/cp/e1141b92e1' debug2: fd 3 setting O_NONBLOCK debug2: mux_client_hello_exchange: master version 4 debug1: mux_client_request_session: master session id: 2 debug2: Received exit status from master 0 Shared connection to 10.31.11.27 closed. TASK [Print script output] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15 Saturday 28 March 2026 19:27:09 -0400 (0:00:00.935) 0:04:44.539 ******** ok: [managed-node12] => {} MSG: {'name': 'foo', 'encrypted': False, 'key_desc': None, 'clevis_pin': None, 'clevis_args': {}} TASK [Get information about Stratis] ******************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:19 Saturday 28 March 2026 19:27:09 -0400 (0:00:00.188) 0:04:44.727 ******** ok: [managed-node12] => { "ansible_facts": { "_stratis_pool_info": { "clevis_args": {}, "clevis_pin": null, "encrypted": false, "key_desc": null, "name": "foo" } }, "changed": false } TASK [Verify that the pools was created] *************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:23 Saturday 28 March 2026 19:27:09 -0400 (0:00:00.221) 0:04:44.948 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Verify that encryption is correctly set] ********************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:30 Saturday 28 March 2026 19:27:10 -0400 (0:00:00.201) 0:04:45.150 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.encryption", "skip_reason": "Conditional result was False" } TASK [Verify that Clevis/Tang encryption is correctly set] ********************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:39 Saturday 28 March 2026 19:27:10 -0400 (0:00:00.160) 0:04:45.310 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.encryption", "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:49 Saturday 28 March 2026 19:27:10 -0400 (0:00:00.151) 0:04:45.462 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_stratis_report": null }, "changed": false } TASK [Clean up test variables] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:106 Saturday 28 March 2026 19:27:10 -0400 (0:00:00.176) 0:04:45.638 ******** ok: [managed-node12] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Verify the volumes] 
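The Stratis check above runs a helper script on the managed node, captures the pool description as JSON on stdout, turns it into the _stratis_pool_info fact, and asserts on it. A minimal sketch of that parse-and-assert pattern, assuming the script output has already been registered; the register name stratis_script_out and the exact assertions are illustrative, not the literal tasks from verify-pool-stratis.yml:

- name: Get information about Stratis
  ansible.builtin.set_fact:
    _stratis_pool_info: "{{ stratis_script_out.stdout | from_json }}"   # stratis_script_out is an assumed register name

- name: Verify that the pool was created
  ansible.builtin.assert:
    that:
      - _stratis_pool_info.name == storage_test_pool.name
      - not _stratis_pool_info.encrypted      # this run requests an unencrypted pool
    msg: "Stratis pool was not created or does not match the requested settings"
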
****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3 Saturday 28 March 2026 19:27:10 -0400 (0:00:00.180) 0:04:45.819 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node12 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'part_type': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=38081a08-cf75-4f9b-b424-e901b7e9592f', '_kernel_device': '/dev/dm-6', '_raw_kernel_device': '/dev/dm-6'}) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node12 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test2', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'part_type': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test2', '_raw_device': '/dev/stratis/foo/test2', '_mount_id': 'UUID=e520ef01-29b1-479d-a588-3bb5ac0d55d9', '_kernel_device': '/dev/dm-7', '_raw_kernel_device': '/dev/dm-7'}) TASK [Set storage volume test variables] *************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2 Saturday 28 March 2026 19:27:11 -0400 (0:00:00.371) 0:04:46.190 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_volume_present": true, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [Run test verify for storage_test_volume_subset] ************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19 Saturday 28 March 2026 
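Each volume is then verified area by area using the _storage_volume_tests list set above (mount, fstab, fs, device, encryption, md, size, cache), with one included task file per area. The dispatch is essentially an include_tasks loop of this shape; the file-name template and loop variable are inferred from the output, not copied from test-verify-volume.yml:

- name: Run test verify for storage_test_volume_subset
  ansible.builtin.include_tasks: "test-verify-volume-{{ storage_test_volume_subset }}.yml"
  loop: "{{ _storage_volume_tests }}"
  loop_control:
    loop_var: storage_test_volume_subset
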
19:27:11 -0400 (0:00:00.202) 0:04:46.392 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node12 => (item=mount) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node12 => (item=fstab) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node12 => (item=fs) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node12 => (item=device) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node12 => (item=encryption) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node12 => (item=md) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node12 => (item=size) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node12 => (item=cache) TASK [Get expected mount device based on device type] ************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7 Saturday 28 March 2026 19:27:12 -0400 (0:00:01.023) 0:04:47.416 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_device_path": "/dev/stratis/foo/test1" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11 Saturday 28 March 2026 19:27:12 -0400 (0:00:00.139) 0:04:47.555 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_mount_expected_mount_point": "/opt/test1", "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Get information about the mountpoint directory] ************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19 Saturday 28 March 2026 19:27:12 -0400 (0:00:00.176) 0:04:47.732 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "(not storage_test_volume.mount_user is none and storage_test_volume.mount_user | length > 0) or (not storage_test_volume.mount_group is none and storage_test_volume.mount_group | length > 0) or (not storage_test_volume.mount_mode is none and storage_test_volume.mount_mode | length > 0)", "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by device] ******************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:32 Saturday 28 March 2026 19:27:12 -0400 (0:00:00.272) 0:04:48.004 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Verify mount directory user] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:40 Saturday 28 March 2026 19:27:13 -0400 (0:00:00.227) 0:04:48.232 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "not storage_test_volume.mount_user is 
none", "skip_reason": "Conditional result was False" } TASK [Verify mount directory group] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:51 Saturday 28 March 2026 19:27:13 -0400 (0:00:00.231) 0:04:48.463 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "not storage_test_volume.mount_group is none", "skip_reason": "Conditional result was False" } TASK [Verify mount directory permissions] ************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:62 Saturday 28 March 2026 19:27:13 -0400 (0:00:00.243) 0:04:48.707 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "not storage_test_volume.mount_mode is none", "skip_reason": "Conditional result was False" } TASK [Get path of test volume device] ****************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:76 Saturday 28 March 2026 19:27:13 -0400 (0:00:00.226) 0:04:48.941 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:82 Saturday 28 March 2026 19:27:13 -0400 (0:00:00.139) 0:04:49.081 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:88 Saturday 28 March 2026 19:27:14 -0400 (0:00:00.128) 0:04:49.210 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Unset facts] ************************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:98 Saturday 28 March 2026 19:27:14 -0400 (0:00:00.180) 0:04:49.390 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_found_mount_stat": null, "storage_test_mount_expected_mount_point": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2 Saturday 28 March 2026 19:27:14 -0400 (0:00:00.140) 0:04:49.531 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "1", "storage_test_fstab_expected_mount_options_matches": "1", "storage_test_fstab_expected_mount_point_matches": "1", "storage_test_fstab_id_matches": [ "UUID=38081a08-cf75-4f9b-b424-e901b7e9592f " ], "storage_test_fstab_mount_options_matches": [ " /opt/test1 xfs defaults " ], "storage_test_fstab_mount_point_matches": [ " /opt/test1 " ] }, "changed": false } TASK [Verify that the device identifier 
appears in /etc/fstab] ***************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17 Saturday 28 March 2026 19:27:14 -0400 (0:00:00.324) 0:04:49.856 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Verify the fstab mount point] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24 Saturday 28 March 2026 19:27:14 -0400 (0:00:00.168) 0:04:50.024 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33 Saturday 28 March 2026 19:27:15 -0400 (0:00:00.192) 0:04:50.216 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "__storage_verify_mount_options | d(false)", "skip_reason": "Conditional result was False" } TASK [Verify fingerprint] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45 Saturday 28 March 2026 19:27:15 -0400 (0:00:00.143) 0:04:50.360 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Clean up variables] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52 Saturday 28 March 2026 19:27:15 -0400 (0:00:00.201) 0:04:50.561 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6 Saturday 28 March 2026 19:27:15 -0400 (0:00:00.149) 0:04:50.710 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type != \"stratis\"", "skip_reason": "Conditional result was False" } TASK [Verify fs label] ********************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14 Saturday 28 March 2026 19:27:15 -0400 (0:00:00.160) 0:04:50.871 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type != \"stratis\"", "skip_reason": "Conditional result was False" } TASK [See whether the device node is present] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3 Saturday 28 March 2026 19:27:15 -0400 (0:00:00.123) 0:04:50.994 ******** ok: [managed-node12] => { "changed": false, "stat": { "atime": 1774740232.0172572, "attr_flags": "", "attributes": [], "block_size": 512, "blocks": 0, "charset": "binary", "ctime": 1774740232.0172572, "dev": 6, "device_type": 64774, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 
1496, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/symlink", "mode": "0660", "mtime": 1774740232.0172572, "nlink": 1, "path": "/dev/stratis/foo/test1", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9 Saturday 28 March 2026 19:27:16 -0400 (0:00:00.774) 0:04:51.769 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Verify the presence/absence of the device node - 2] ********************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16 Saturday 28 March 2026 19:27:16 -0400 (0:00:00.204) 0:04:51.974 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "not (_storage_test_volume_present or storage_test_volume.type == 'disk')", "skip_reason": "Conditional result was False" } TASK [Make sure we got info about this volume] ********************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23 Saturday 28 March 2026 19:27:17 -0400 (0:00:00.178) 0:04:52.152 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Process volume type (set initial value) (1/2)] *************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29 Saturday 28 March 2026 19:27:17 -0400 (0:00:00.490) 0:04:52.650 ******** ok: [managed-node12] => { "ansible_facts": { "st_volume_type": "stratis" }, "changed": false } TASK [Process volume type (get RAID value) (2/2)] ****************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33 Saturday 28 March 2026 19:27:17 -0400 (0:00:00.158) 0:04:52.809 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == \"raid\"", "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38 Saturday 28 March 2026 19:27:17 -0400 (0:00:00.129) 0:04:52.942 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Stat the LUKS device, if encrypted] ************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3 Saturday 28 March 2026 19:27:17 -0400 (0:00:00.126) 0:04:53.069 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10 Saturday 28 March 2026 19:27:18 -0400 (0:00:00.080) 0:04:53.149 ******** ok: 
[managed-node12] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: cryptsetup TASK [Collect LUKS info for this volume] *************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16 Saturday 28 March 2026 19:27:19 -0400 (0:00:01.112) 0:04:54.262 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.encryption and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22 Saturday 28 March 2026 19:27:19 -0400 (0:00:00.162) 0:04:54.424 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29 Saturday 28 March 2026 19:27:19 -0400 (0:00:00.129) 0:04:54.554 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40 Saturday 28 March 2026 19:27:19 -0400 (0:00:00.177) 0:04:54.732 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46 Saturday 28 March 2026 19:27:19 -0400 (0:00:00.150) 0:04:54.883 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51 Saturday 28 March 2026 19:27:19 -0400 (0:00:00.125) 0:04:55.008 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:64 Saturday 28 March 2026 19:27:20 -0400 (0:00:00.166) 0:04:55.175 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:77 Saturday 28 March 2026 19:27:20 -0400 (0:00:00.149) 0:04:55.324 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.encryption", 
"skip_reason": "Conditional result was False" } TASK [Set test variables] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:90 Saturday 28 March 2026 19:27:20 -0400 (0:00:00.143) 0:04:55.468 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:96 Saturday 28 March 2026 19:27:20 -0400 (0:00:00.269) 0:04:55.738 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:103 Saturday 28 March 2026 19:27:20 -0400 (0:00:00.257) 0:04:55.995 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:111 Saturday 28 March 2026 19:27:21 -0400 (0:00:00.177) 0:04:56.173 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:119 Saturday 28 March 2026 19:27:21 -0400 (0:00:00.145) 0:04:56.318 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:127 Saturday 28 March 2026 19:27:21 -0400 (0:00:00.128) 0:04:56.447 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [Get information about RAID] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8 Saturday 28 March 2026 19:27:21 -0400 (0:00:00.087) 0:04:56.535 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14 Saturday 28 March 2026 19:27:21 -0400 (0:00:00.088) 0:04:56.624 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", 
"skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19 Saturday 28 March 2026 19:27:21 -0400 (0:00:00.066) 0:04:56.690 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24 Saturday 28 March 2026 19:27:21 -0400 (0:00:00.089) 0:04:56.779 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set chunk size regex] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29 Saturday 28 March 2026 19:27:21 -0400 (0:00:00.063) 0:04:56.843 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37 Saturday 28 March 2026 19:27:21 -0400 (0:00:00.110) 0:04:56.953 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46 Saturday 28 March 2026 19:27:21 -0400 (0:00:00.120) 0:04:57.074 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54 Saturday 28 March 2026 19:27:22 -0400 (0:00:00.145) 0:04:57.220 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62 Saturday 28 March 2026 19:27:22 -0400 (0:00:00.126) 0:04:57.346 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70 Saturday 28 March 2026 19:27:22 -0400 (0:00:00.140) 0:04:57.487 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Parse the actual size of the volume] 
************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3 Saturday 28 March 2026 19:27:22 -0400 (0:00:00.131) 0:04:57.618 ******** ok: [managed-node12] => { "bytes": 4294967296, "changed": false, "lvm": "4g", "parted": "4GiB", "size": "4 GiB" } TASK [Parse the requested size of the volume] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11 Saturday 28 March 2026 19:27:23 -0400 (0:00:00.693) 0:04:58.312 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == \"lvm\"", "skip_reason": "Conditional result was False" } TASK [Establish base value for expected size] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20 Saturday 28 March 2026 19:27:23 -0400 (0:00:00.134) 0:04:58.446 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == \"lvm\"", "skip_reason": "Conditional result was False" } TASK [Show expected size] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28 Saturday 28 March 2026 19:27:23 -0400 (0:00:00.163) 0:04:58.610 ******** ok: [managed-node12] => { "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined" } TASK [Get the size of parent/pool device] ************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32 Saturday 28 March 2026 19:27:23 -0400 (0:00:00.183) 0:04:58.794 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == \"lvm\"", "skip_reason": "Conditional result was False" } TASK [Show test pool] ********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46 Saturday 28 March 2026 19:27:23 -0400 (0:00:00.149) 0:04:58.943 ******** skipping: [managed-node12] => { "false_condition": "storage_test_volume.type == \"lvm\"" } TASK [Show test blockinfo] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50 Saturday 28 March 2026 19:27:23 -0400 (0:00:00.139) 0:04:59.082 ******** skipping: [managed-node12] => { "false_condition": "storage_test_volume.type == \"lvm\"" } TASK [Show test pool size] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54 Saturday 28 March 2026 19:27:24 -0400 (0:00:00.170) 0:04:59.253 ******** skipping: [managed-node12] => { "false_condition": "storage_test_volume.type == \"lvm\"" } TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58 Saturday 28 March 2026 19:27:24 -0400 (0:00:00.163) 0:04:59.416 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == \"lvm\"", "skip_reason": 
"Conditional result was False" } TASK [Default thin pool reserved space values] ********************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:68 Saturday 28 March 2026 19:27:24 -0400 (0:00:00.153) 0:04:59.569 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Default minimal thin pool reserved space size] *************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:72 Saturday 28 March 2026 19:27:24 -0400 (0:00:00.147) 0:04:59.719 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Default maximal thin pool reserved space size] *************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:77 Saturday 28 March 2026 19:27:24 -0400 (0:00:00.142) 0:04:59.862 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Calculate maximum usable space in thin pool] ***************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:83 Saturday 28 March 2026 19:27:24 -0400 (0:00:00.163) 0:05:00.028 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Apply upper size limit to max usable thin pool space] ******************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:87 Saturday 28 March 2026 19:27:25 -0400 (0:00:00.074) 0:05:00.103 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Apply lower size limit to max usable thin pool space] ******************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:92 Saturday 28 March 2026 19:27:25 -0400 (0:00:00.186) 0:05:00.289 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Convert maximum usable thin pool space from int to Size] ***************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:97 Saturday 28 March 2026 19:27:25 -0400 (0:00:00.191) 0:05:00.481 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Show max thin pool size] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:102 Saturday 28 March 2026 19:27:25 -0400 (0:00:00.246) 0:05:00.727 ******** skipping: [managed-node12] => { "false_condition": "storage_test_volume.thin | bool" } TASK [Show volume thin pool size] ********************************************** task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:106 Saturday 28 March 2026 19:27:25 -0400 (0:00:00.177) 0:05:00.905 ******** skipping: [managed-node12] => { "false_condition": "storage_test_volume.thin | bool" } TASK [Show test volume size] *************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:110 Saturday 28 March 2026 19:27:26 -0400 (0:00:00.211) 0:05:01.117 ******** skipping: [managed-node12] => { "false_condition": "storage_test_volume.thin | bool" } TASK [Establish base value for expected thin pool size] ************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:114 Saturday 28 March 2026 19:27:26 -0400 (0:00:00.213) 0:05:01.330 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Calculate the expected size based on pool size and percentage value - 2] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:121 Saturday 28 March 2026 19:27:26 -0400 (0:00:00.207) 0:05:01.537 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Establish base value for expected thin pool volume size] ***************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:128 Saturday 28 March 2026 19:27:26 -0400 (0:00:00.211) 0:05:01.748 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Calculate the expected thin pool volume size based on percentage value] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:132 Saturday 28 March 2026 19:27:26 -0400 (0:00:00.174) 0:05:01.923 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Replace expected volume size with calculated value] ********************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:138 Saturday 28 March 2026 19:27:27 -0400 (0:00:00.194) 0:05:02.117 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Show actual size] ******************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:144 Saturday 28 March 2026 19:27:27 -0400 (0:00:00.199) 0:05:02.317 ******** ok: [managed-node12] => { "storage_test_actual_size": { "bytes": 4294967296, "changed": false, "failed": false, "lvm": "4g", "parted": "4GiB", "size": "4 GiB" } } TASK [Show expected size - 2] ************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:148 Saturday 28 March 2026 19:27:27 -0400 (0:00:00.221) 0:05:02.539 ******** ok: [managed-node12] => { 
"storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined" } TASK [Assert expected size is actual size] ************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:152 Saturday 28 March 2026 19:27:27 -0400 (0:00:00.185) 0:05:02.725 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == \"lvm\"", "skip_reason": "Conditional result was False" } TASK [Get information about the LV] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5 Saturday 28 March 2026 19:27:27 -0400 (0:00:00.172) 0:05:02.897 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13 Saturday 28 March 2026 19:27:27 -0400 (0:00:00.145) 0:05:03.043 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Check segment type] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17 Saturday 28 March 2026 19:27:28 -0400 (0:00:00.138) 0:05:03.181 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Set LV cache size] ******************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24 Saturday 28 March 2026 19:27:28 -0400 (0:00:00.101) 0:05:03.282 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Parse the requested cache size] ****************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31 Saturday 28 March 2026 19:27:28 -0400 (0:00:00.111) 0:05:03.394 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Set expected cache size] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37 Saturday 28 March 2026 19:27:28 -0400 (0:00:00.096) 0:05:03.491 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42 Saturday 28 March 2026 
19:27:28 -0400 (0:00:00.097) 0:05:03.588 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25 Saturday 28 March 2026 19:27:28 -0400 (0:00:00.138) 0:05:03.727 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Set storage volume test variables] *************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2 Saturday 28 March 2026 19:27:28 -0400 (0:00:00.118) 0:05:03.846 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_volume_present": true, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [Run test verify for storage_test_volume_subset] ************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19 Saturday 28 March 2026 19:27:28 -0400 (0:00:00.175) 0:05:04.021 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node12 => (item=mount) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node12 => (item=fstab) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node12 => (item=fs) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node12 => (item=device) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node12 => (item=encryption) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node12 => (item=md) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node12 => (item=size) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node12 => (item=cache) TASK [Get expected mount device based on device type] ************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7 Saturday 28 March 2026 19:27:29 -0400 (0:00:00.883) 0:05:04.904 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_device_path": "/dev/stratis/foo/test2" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11 Saturday 28 March 2026 19:27:30 -0400 (0:00:00.226) 0:05:05.130 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_mount_expected_mount_point": "/opt/test2", "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Get information about the mountpoint directory] ************************** task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19 Saturday 28 March 2026 19:27:30 -0400 (0:00:00.247) 0:05:05.378 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "(not storage_test_volume.mount_user is none and storage_test_volume.mount_user | length > 0) or (not storage_test_volume.mount_group is none and storage_test_volume.mount_group | length > 0) or (not storage_test_volume.mount_mode is none and storage_test_volume.mount_mode | length > 0)", "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by device] ******************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:32 Saturday 28 March 2026 19:27:30 -0400 (0:00:00.227) 0:05:05.605 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Verify mount directory user] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:40 Saturday 28 March 2026 19:27:30 -0400 (0:00:00.189) 0:05:05.795 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "not storage_test_volume.mount_user is none", "skip_reason": "Conditional result was False" } TASK [Verify mount directory group] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:51 Saturday 28 March 2026 19:27:30 -0400 (0:00:00.241) 0:05:06.036 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "not storage_test_volume.mount_group is none", "skip_reason": "Conditional result was False" } TASK [Verify mount directory permissions] ************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:62 Saturday 28 March 2026 19:27:31 -0400 (0:00:00.230) 0:05:06.267 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "not storage_test_volume.mount_mode is none", "skip_reason": "Conditional result was False" } TASK [Get path of test volume device] ****************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:76 Saturday 28 March 2026 19:27:31 -0400 (0:00:00.185) 0:05:06.452 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:82 Saturday 28 March 2026 19:27:31 -0400 (0:00:00.125) 0:05:06.578 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:88 Saturday 28 March 2026 19:27:31 -0400 (0:00:00.124) 0:05:06.703 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": 
"Conditional result was False" } TASK [Unset facts] ************************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:98 Saturday 28 March 2026 19:27:31 -0400 (0:00:00.113) 0:05:06.817 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_found_mount_stat": null, "storage_test_mount_expected_mount_point": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2 Saturday 28 March 2026 19:27:31 -0400 (0:00:00.144) 0:05:06.961 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "1", "storage_test_fstab_expected_mount_options_matches": "1", "storage_test_fstab_expected_mount_point_matches": "1", "storage_test_fstab_id_matches": [ "UUID=e520ef01-29b1-479d-a588-3bb5ac0d55d9 " ], "storage_test_fstab_mount_options_matches": [ " /opt/test2 xfs defaults " ], "storage_test_fstab_mount_point_matches": [ " /opt/test2 " ] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17 Saturday 28 March 2026 19:27:32 -0400 (0:00:00.310) 0:05:07.271 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Verify the fstab mount point] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24 Saturday 28 March 2026 19:27:32 -0400 (0:00:00.170) 0:05:07.442 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33 Saturday 28 March 2026 19:27:32 -0400 (0:00:00.176) 0:05:07.619 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "__storage_verify_mount_options | d(false)", "skip_reason": "Conditional result was False" } TASK [Verify fingerprint] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45 Saturday 28 March 2026 19:27:32 -0400 (0:00:00.126) 0:05:07.746 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Clean up variables] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52 Saturday 28 March 2026 19:27:32 -0400 (0:00:00.106) 0:05:07.852 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6 Saturday 28 March 2026 19:27:32 -0400 (0:00:00.142) 0:05:07.994 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type != \"stratis\"", "skip_reason": "Conditional result was False" } TASK [Verify fs label] ********************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14 Saturday 28 March 2026 19:27:32 -0400 (0:00:00.069) 0:05:08.064 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type != \"stratis\"", "skip_reason": "Conditional result was False" } TASK [See whether the device node is present] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3 Saturday 28 March 2026 19:27:33 -0400 (0:00:00.106) 0:05:08.170 ******** ok: [managed-node12] => { "changed": false, "stat": { "atime": 1774740408.6566026, "attr_flags": "", "attributes": [], "block_size": 512, "blocks": 0, "charset": "binary", "ctime": 1774740408.6566026, "dev": 6, "device_type": 64775, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 1521, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/symlink", "mode": "0660", "mtime": 1774740408.6566026, "nlink": 1, "path": "/dev/stratis/foo/test2", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9 Saturday 28 March 2026 19:27:33 -0400 (0:00:00.745) 0:05:08.916 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Verify the presence/absence of the device node - 2] ********************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16 Saturday 28 March 2026 19:27:33 -0400 (0:00:00.098) 0:05:09.014 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "not (_storage_test_volume_present or storage_test_volume.type == 'disk')", "skip_reason": "Conditional result was False" } TASK [Make sure we got info about this volume] ********************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23 Saturday 28 March 2026 19:27:34 -0400 (0:00:00.104) 0:05:09.119 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Process volume type (set initial value) (1/2)] *************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29 Saturday 28 March 2026 19:27:34 -0400 (0:00:00.165) 0:05:09.285 ******** ok: [managed-node12] => { "ansible_facts": { "st_volume_type": "stratis" }, "changed": false } TASK [Process volume type (get RAID value) (2/2)] ****************************** task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33 Saturday 28 March 2026 19:27:34 -0400 (0:00:00.135) 0:05:09.420 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == \"raid\"", "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38 Saturday 28 March 2026 19:27:34 -0400 (0:00:00.166) 0:05:09.587 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Stat the LUKS device, if encrypted] ************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3 Saturday 28 March 2026 19:27:34 -0400 (0:00:00.252) 0:05:09.839 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10 Saturday 28 March 2026 19:27:34 -0400 (0:00:00.156) 0:05:09.996 ******** ok: [managed-node12] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: cryptsetup TASK [Collect LUKS info for this volume] *************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16 Saturday 28 March 2026 19:27:36 -0400 (0:00:01.254) 0:05:11.250 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.encryption and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22 Saturday 28 March 2026 19:27:36 -0400 (0:00:00.085) 0:05:11.335 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29 Saturday 28 March 2026 19:27:36 -0400 (0:00:00.093) 0:05:11.429 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40 Saturday 28 March 2026 19:27:36 -0400 (0:00:00.171) 0:05:11.600 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46 Saturday 28 March 2026 19:27:36 -0400 (0:00:00.065) 0:05:11.666 ******** skipping: 
[managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51 Saturday 28 March 2026 19:27:36 -0400 (0:00:00.087) 0:05:11.753 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:64 Saturday 28 March 2026 19:27:36 -0400 (0:00:00.064) 0:05:11.818 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:77 Saturday 28 March 2026 19:27:36 -0400 (0:00:00.063) 0:05:11.881 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Set test variables] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:90 Saturday 28 March 2026 19:27:36 -0400 (0:00:00.104) 0:05:11.986 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:96 Saturday 28 March 2026 19:27:37 -0400 (0:00:00.140) 0:05:12.126 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:103 Saturday 28 March 2026 19:27:37 -0400 (0:00:00.086) 0:05:12.212 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:111 Saturday 28 March 2026 19:27:37 -0400 (0:00:00.083) 0:05:12.296 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:119 Saturday 28 March 2026 19:27:37 -0400 (0:00:00.099) 0:05:12.396 ******** skipping: [managed-node12] => { "changed": 
false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:127 Saturday 28 March 2026 19:27:37 -0400 (0:00:00.074) 0:05:12.470 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [Get information about RAID] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8 Saturday 28 March 2026 19:27:37 -0400 (0:00:00.111) 0:05:12.582 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14 Saturday 28 March 2026 19:27:37 -0400 (0:00:00.084) 0:05:12.667 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19 Saturday 28 March 2026 19:27:37 -0400 (0:00:00.084) 0:05:12.751 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24 Saturday 28 March 2026 19:27:37 -0400 (0:00:00.110) 0:05:12.861 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set chunk size regex] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29 Saturday 28 March 2026 19:27:37 -0400 (0:00:00.081) 0:05:12.943 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37 Saturday 28 March 2026 19:27:37 -0400 (0:00:00.077) 0:05:13.020 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46 Saturday 28 March 2026 19:27:38 -0400 (0:00:00.118) 0:05:13.139 ******** skipping: [managed-node12] => { "changed": false, "false_condition": 
"storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54 Saturday 28 March 2026 19:27:38 -0400 (0:00:00.115) 0:05:13.269 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62 Saturday 28 March 2026 19:27:38 -0400 (0:00:00.089) 0:05:13.362 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70 Saturday 28 March 2026 19:27:38 -0400 (0:00:00.117) 0:05:13.480 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Parse the actual size of the volume] ************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3 Saturday 28 March 2026 19:27:38 -0400 (0:00:00.108) 0:05:13.589 ******** ok: [managed-node12] => { "bytes": 4294967296, "changed": false, "lvm": "4g", "parted": "4GiB", "size": "4 GiB" } TASK [Parse the requested size of the volume] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11 Saturday 28 March 2026 19:27:39 -0400 (0:00:00.760) 0:05:14.350 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == \"lvm\"", "skip_reason": "Conditional result was False" } TASK [Establish base value for expected size] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20 Saturday 28 March 2026 19:27:39 -0400 (0:00:00.133) 0:05:14.483 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == \"lvm\"", "skip_reason": "Conditional result was False" } TASK [Show expected size] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28 Saturday 28 March 2026 19:27:39 -0400 (0:00:00.134) 0:05:14.618 ******** ok: [managed-node12] => { "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined" } TASK [Get the size of parent/pool device] ************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32 Saturday 28 March 2026 19:27:39 -0400 (0:00:00.182) 0:05:14.800 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == \"lvm\"", "skip_reason": "Conditional result was False" } TASK [Show test pool] ********************************************************** 
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46 Saturday 28 March 2026 19:27:39 -0400 (0:00:00.135) 0:05:14.936 ******** skipping: [managed-node12] => { "false_condition": "storage_test_volume.type == \"lvm\"" } TASK [Show test blockinfo] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50 Saturday 28 March 2026 19:27:39 -0400 (0:00:00.123) 0:05:15.059 ******** skipping: [managed-node12] => { "false_condition": "storage_test_volume.type == \"lvm\"" } TASK [Show test pool size] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54 Saturday 28 March 2026 19:27:40 -0400 (0:00:00.074) 0:05:15.134 ******** skipping: [managed-node12] => { "false_condition": "storage_test_volume.type == \"lvm\"" } TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58 Saturday 28 March 2026 19:27:40 -0400 (0:00:00.138) 0:05:15.272 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == \"lvm\"", "skip_reason": "Conditional result was False" } TASK [Default thin pool reserved space values] ********************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:68 Saturday 28 March 2026 19:27:40 -0400 (0:00:00.149) 0:05:15.422 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Default minimal thin pool reserved space size] *************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:72 Saturday 28 March 2026 19:27:40 -0400 (0:00:00.197) 0:05:15.619 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Default maximal thin pool reserved space size] *************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:77 Saturday 28 March 2026 19:27:40 -0400 (0:00:00.200) 0:05:15.820 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Calculate maximum usable space in thin pool] ***************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:83 Saturday 28 March 2026 19:27:40 -0400 (0:00:00.156) 0:05:15.977 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Apply upper size limit to max usable thin pool space] ******************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:87 Saturday 28 March 2026 19:27:41 -0400 (0:00:00.199) 0:05:16.176 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", 
"skip_reason": "Conditional result was False" } TASK [Apply lower size limit to max usable thin pool space] ******************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:92 Saturday 28 March 2026 19:27:41 -0400 (0:00:00.162) 0:05:16.338 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Convert maximum usable thin pool space from int to Size] ***************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:97 Saturday 28 March 2026 19:27:41 -0400 (0:00:00.214) 0:05:16.553 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Show max thin pool size] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:102 Saturday 28 March 2026 19:27:41 -0400 (0:00:00.183) 0:05:16.736 ******** skipping: [managed-node12] => { "false_condition": "storage_test_volume.thin | bool" } TASK [Show volume thin pool size] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:106 Saturday 28 March 2026 19:27:41 -0400 (0:00:00.212) 0:05:16.949 ******** skipping: [managed-node12] => { "false_condition": "storage_test_volume.thin | bool" } TASK [Show test volume size] *************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:110 Saturday 28 March 2026 19:27:42 -0400 (0:00:00.178) 0:05:17.128 ******** skipping: [managed-node12] => { "false_condition": "storage_test_volume.thin | bool" } TASK [Establish base value for expected thin pool size] ************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:114 Saturday 28 March 2026 19:27:42 -0400 (0:00:00.206) 0:05:17.334 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Calculate the expected size based on pool size and percentage value - 2] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:121 Saturday 28 March 2026 19:27:42 -0400 (0:00:00.202) 0:05:17.536 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Establish base value for expected thin pool volume size] ***************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:128 Saturday 28 March 2026 19:27:42 -0400 (0:00:00.172) 0:05:17.709 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Calculate the expected thin pool volume size based on percentage value] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:132 Saturday 28 March 2026 19:27:42 -0400 (0:00:00.205) 
0:05:17.914 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Replace expected volume size with calculated value] ********************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:138 Saturday 28 March 2026 19:27:43 -0400 (0:00:00.220) 0:05:18.135 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Show actual size] ******************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:144 Saturday 28 March 2026 19:27:43 -0400 (0:00:00.172) 0:05:18.307 ******** ok: [managed-node12] => { "storage_test_actual_size": { "bytes": 4294967296, "changed": false, "failed": false, "lvm": "4g", "parted": "4GiB", "size": "4 GiB" } } TASK [Show expected size - 2] ************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:148 Saturday 28 March 2026 19:27:43 -0400 (0:00:00.125) 0:05:18.433 ******** ok: [managed-node12] => { "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined" } TASK [Assert expected size is actual size] ************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:152 Saturday 28 March 2026 19:27:43 -0400 (0:00:00.194) 0:05:18.628 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == \"lvm\"", "skip_reason": "Conditional result was False" } TASK [Get information about the LV] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5 Saturday 28 March 2026 19:27:43 -0400 (0:00:00.129) 0:05:18.757 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13 Saturday 28 March 2026 19:27:43 -0400 (0:00:00.149) 0:05:18.906 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Check segment type] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17 Saturday 28 March 2026 19:27:43 -0400 (0:00:00.130) 0:05:19.036 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Set LV cache size] ******************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24 Saturday 28 March 2026 19:27:44 -0400 (0:00:00.118) 0:05:19.155 
******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Parse the requested cache size] ****************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31 Saturday 28 March 2026 19:27:44 -0400 (0:00:00.152) 0:05:19.307 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Set expected cache size] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37 Saturday 28 March 2026 19:27:44 -0400 (0:00:00.453) 0:05:19.760 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42 Saturday 28 March 2026 19:27:44 -0400 (0:00:00.147) 0:05:19.907 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25 Saturday 28 March 2026 19:27:44 -0400 (0:00:00.130) 0:05:20.038 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:43 Saturday 28 March 2026 19:27:45 -0400 (0:00:00.139) 0:05:20.177 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Clean up variable namespace] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:52 Saturday 28 March 2026 19:27:45 -0400 (0:00:00.141) 0:05:20.319 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Clean up] **************************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:110 Saturday 28 March 2026 19:27:45 -0400 (0:00:00.189) 0:05:20.508 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml for managed-node12 TASK [Clear facts] ************************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml:10 Saturday 28 March 2026 19:27:45 -0400 (0:00:00.362) 0:05:20.871 ******** META: facts cleared TASK [Run the role] 
************************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml:24 Saturday 28 March 2026 19:27:45 -0400 (0:00:00.054) 0:05:20.925 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "__sr_failed_when is defined", "skip_reason": "Conditional result was False" } TASK [Run the role normally] *************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml:34 Saturday 28 March 2026 19:27:45 -0400 (0:00:00.165) 0:05:21.091 ******** included: fedora.linux_system_roles.storage for managed-node12 TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Saturday 28 March 2026 19:27:46 -0400 (0:00:00.253) 0:05:21.345 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node12 TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Saturday 28 March 2026 19:27:46 -0400 (0:00:00.176) 0:05:21.521 ******** ok: [managed-node12] TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Saturday 28 March 2026 19:27:48 -0400 (0:00:01.643) 0:05:23.165 ******** skipping: [managed-node12] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node12] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node12] => (item=CentOS_10.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-fs", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "xfsprogs", "stratisd", "stratis-cli", "{{ 'libblockdev-s390' if ansible_facts['architecture'] == 's390x' else 'libblockdev' }}", "vdo" ] }, "ansible_included_var_files": [ "/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node12] => (item=CentOS_10.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-fs", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "xfsprogs", "stratisd", "stratis-cli", "{{ 'libblockdev-s390' if ansible_facts['architecture'] == 's390x' else 'libblockdev' }}", "vdo" ] }, "ansible_included_var_files": [ "/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Saturday 28 March 2026 19:27:48 -0400 (0:00:00.245) 
0:05:23.411 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "not __storage_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Saturday 28 March 2026 19:27:48 -0400 (0:00:00.086) 0:05:23.498 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "not __storage_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Saturday 28 March 2026 19:27:48 -0400 (0:00:00.142) 0:05:23.640 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Saturday 28 March 2026 19:27:48 -0400 (0:00:00.122) 0:05:23.762 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Saturday 28 March 2026 19:27:48 -0400 (0:00:00.122) 0:05:23.885 ******** redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node12 TASK [fedora.linux_system_roles.storage : Add repo key] ************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Saturday 28 March 2026 19:27:49 -0400 (0:00:00.424) 0:05:24.310 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_blivet_custom_repo.key is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Add blivet repo] ********************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:15 Saturday 28 March 2026 19:27:49 -0400 (0:00:00.143) 0:05:24.453 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_blivet_custom_repo.key is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:20 Saturday 28 March 2026 19:27:49 -0400 (0:00:00.129) 0:05:24.582 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:27 Saturday 28 March 2026 19:27:49 -0400 (0:00:00.195) 0:05:24.778 ******** ok: [managed-node12] => { "storage_pools | d([])": [ 
{ "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "name": "foo", "state": "absent", "type": "stratis", "volumes": [ { "mount_point": "/opt/test1", "name": "test1", "size": "4g", "state": "absent" }, { "mount_point": "/opt/test2", "name": "test2", "size": "4g", "state": "absent" } ] } ] } TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:32 Saturday 28 March 2026 19:27:49 -0400 (0:00:00.193) 0:05:24.972 ******** ok: [managed-node12] => { "storage_volumes | d([])": [] } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37 Saturday 28 March 2026 19:27:50 -0400 (0:00:00.202) 0:05:25.174 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:50 Saturday 28 March 2026 19:27:50 -0400 (0:00:00.141) 0:05:25.316 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:56 Saturday 28 March 2026 19:27:50 -0400 (0:00:00.146) 0:05:25.462 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:76 Saturday 28 March 2026 19:27:50 -0400 (0:00:00.188) 0:05:25.650 ******** ok: [managed-node12] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "apt-daily.service": { "name": "apt-daily.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, 
"blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": 
"inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fcoe.service": { "name": "fcoe.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "iscsi-shutdown.service": { "name": "iscsi-shutdown.service", "source": "systemd", "state": 
"stopped", "status": "not-found" }, "iscsi.service": { "name": "iscsi.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsid.service": { "name": "iscsid.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-activation-early.service": { "name": "lvm2-activation-early.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_multipath.service": { "name": "modprobe@dm_multipath.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" 
}, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "raid-check.service": { "name": "raid-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "rbdmap.service": { "name": "rbdmap.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "stopped", "status": "disabled" 
}, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-unix-local@.service": { "name": "sshd-unix-local@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd-vsock@.service": { "name": "sshd-vsock@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "stratis-fstab-setup-with-network@.service": { "name": "stratis-fstab-setup-with-network@.service", "source": "systemd", "state": "unknown", "status": "static" }, "stratis-fstab-setup@.service": { "name": 
"stratis-fstab-setup@.service", "source": "systemd", "state": "unknown", "status": "static" }, "stratisd-min-postinitrd.service": { "name": "stratisd-min-postinitrd.service", "source": "systemd", "state": "inactive", "status": "static" }, "stratisd.service": { "name": "stratisd.service", "source": "systemd", "state": "running", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", 
"status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": 
"systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { 
"name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-userdbd.service": { "name": "systemd-userdbd.service", "source": "systemd", "state": "running", "status": "indirect" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup 
services] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:82 Saturday 28 March 2026 19:27:53 -0400 (0:00:02.566) 0:05:28.217 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:88 Saturday 28 March 2026 19:27:53 -0400 (0:00:00.196) 0:05:28.413 ******** changed: [managed-node12] => { "actions": [ { "action": "destroy format", "device": "/dev/stratis/foo/test2", "fs_type": "stratis xfs" }, { "action": "destroy device", "device": "/dev/stratis/foo/test2", "fs_type": null }, { "action": "destroy format", "device": "/dev/stratis/foo/test1", "fs_type": "stratis xfs" }, { "action": "destroy device", "device": "/dev/stratis/foo/test1", "fs_type": null }, { "action": "destroy device", "device": "/dev/stratis/foo", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdb", "fs_type": "stratis" }, { "action": "destroy format", "device": "/dev/sdh", "fs_type": "stratis" }, { "action": "destroy format", "device": "/dev/sdi", "fs_type": "stratis" }, { "action": "destroy format", "device": "/dev/sdf", "fs_type": "stratis" }, { "action": "destroy format", "device": "/dev/sda", "fs_type": "stratis" }, { "action": "destroy format", "device": "/dev/sdc", "fs_type": "stratis" }, { "action": "destroy format", "device": "/dev/sdg", "fs_type": "stratis" }, { "action": "destroy format", "device": "/dev/sde", "fs_type": "stratis" }, { "action": "destroy format", "device": "/dev/sdd", "fs_type": "stratis" } ], "changed": true, "crypts": [], "leaves": [ "/dev/sda", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/xvda2" ], "mounts": [ { "fstype": "xfs", "path": "/opt/test2", "src": "UUID=e520ef01-29b1-479d-a588-3bb5ac0d55d9", "state": "absent" }, { "fstype": "xfs", "path": "/opt/test1", "src": "UUID=38081a08-cf75-4f9b-b424-e901b7e9592f", "state": "absent" } ], "packages": [ "xfsprogs" ], "pools": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "stratis", "volumes": [ { "_device": "/dev/stratis/foo/test1", "_mount_id": "UUID=38081a08-cf75-4f9b-b424-e901b7e9592f", "_raw_device": "/dev/stratis/foo/test1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", 
"mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null }, { "_device": "/dev/stratis/foo/test2", "_mount_id": "UUID=e520ef01-29b1-479d-a588-3bb5ac0d55d9", "_raw_device": "/dev/stratis/foo/test2", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "mount_user": null, "name": "test2", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:103 Saturday 28 March 2026 19:28:19 -0400 (0:00:25.718) 0:05:54.132 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_udevadm_trigger | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ****** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:110 Saturday 28 March 2026 19:28:19 -0400 (0:00:00.053) 0:05:54.185 ******** ok: [managed-node12] => { "changed": false, "stat": { "atime": 1774740414.5316806, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "21c6c34cc0511ce1d61ffd0f572d65a203e19010", "ctime": 1774740414.5293355, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 230686985, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1774740414.5293355, "nlink": 1, "path": "/etc/fstab", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1548, "uid": 0, "version": "1679064280", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:115 Saturday 28 March 2026 19:28:19 -0400 (0:00:00.598) 0:05:54.783 ******** ok: [managed-node12] => { "backup": "", "changed": false } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:133 Saturday 28 March 2026 19:28:20 -0400 (0:00:00.825) 0:05:55.609 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:139 Saturday 28 March 2026 19:28:20 -0400 (0:00:00.222) 0:05:55.831 ******** ok: [managed-node12] => { "blivet_output": { "actions": [ { "action": "destroy format", "device": "/dev/stratis/foo/test2", "fs_type": "stratis xfs" }, { "action": "destroy device", "device": "/dev/stratis/foo/test2", "fs_type": null }, { "action": "destroy format", "device": "/dev/stratis/foo/test1", "fs_type": "stratis xfs" }, { "action": "destroy device", "device": "/dev/stratis/foo/test1", "fs_type": null }, { "action": "destroy device", "device": "/dev/stratis/foo", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdb", "fs_type": "stratis" }, { "action": "destroy format", "device": "/dev/sdh", "fs_type": "stratis" }, { "action": "destroy format", "device": "/dev/sdi", "fs_type": "stratis" }, { "action": "destroy format", "device": "/dev/sdf", "fs_type": "stratis" }, { "action": "destroy format", "device": "/dev/sda", "fs_type": "stratis" }, { "action": "destroy format", "device": "/dev/sdc", "fs_type": "stratis" }, { "action": "destroy format", "device": "/dev/sdg", "fs_type": "stratis" }, { "action": "destroy format", "device": "/dev/sde", "fs_type": "stratis" }, { "action": "destroy format", "device": "/dev/sdd", "fs_type": "stratis" } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/sda", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/xvda2" ], "mounts": [ { "fstype": "xfs", "path": "/opt/test2", "src": "UUID=e520ef01-29b1-479d-a588-3bb5ac0d55d9", "state": "absent" }, { "fstype": "xfs", "path": "/opt/test1", "src": "UUID=38081a08-cf75-4f9b-b424-e901b7e9592f", "state": "absent" } ], "packages": [ "xfsprogs" ], "pools": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "stratis", "volumes": [ { "_device": "/dev/stratis/foo/test1", "_mount_id": "UUID=38081a08-cf75-4f9b-b424-e901b7e9592f", "_raw_device": "/dev/stratis/foo/test1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, 
"name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null }, { "_device": "/dev/stratis/foo/test2", "_mount_id": "UUID=e520ef01-29b1-479d-a588-3bb5ac0d55d9", "_raw_device": "/dev/stratis/foo/test2", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "mount_user": null, "name": "test2", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148 Saturday 28 March 2026 19:28:20 -0400 (0:00:00.170) 0:05:56.002 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "stratis", "volumes": [ { "_device": "/dev/stratis/foo/test1", "_mount_id": "UUID=38081a08-cf75-4f9b-b424-e901b7e9592f", "_raw_device": "/dev/stratis/foo/test1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null }, { 
"_device": "/dev/stratis/foo/test2", "_mount_id": "UUID=e520ef01-29b1-479d-a588-3bb5ac0d55d9", "_raw_device": "/dev/stratis/foo/test2", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "mount_user": null, "name": "test2", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:152 Saturday 28 March 2026 19:28:21 -0400 (0:00:00.143) 0:05:56.145 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] ************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:168 Saturday 28 March 2026 19:28:21 -0400 (0:00:00.134) 0:05:56.280 ******** redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount changed: [managed-node12] => (item={'src': 'UUID=e520ef01-29b1-479d-a588-3bb5ac0d55d9', 'path': '/opt/test2', 'state': 'absent', 'fstype': 'xfs'}) => { "ansible_loop_var": "mount_info", "backup_file": "", "boot": "yes", "changed": true, "dump": "0", "fstab": "/etc/fstab", "fstype": "xfs", "mount_info": { "fstype": "xfs", "path": "/opt/test2", "src": "UUID=e520ef01-29b1-479d-a588-3bb5ac0d55d9", "state": "absent" }, "name": "/opt/test2", "opts": "defaults", "passno": "0", "src": "UUID=e520ef01-29b1-479d-a588-3bb5ac0d55d9" } redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount changed: [managed-node12] => (item={'src': 'UUID=38081a08-cf75-4f9b-b424-e901b7e9592f', 'path': '/opt/test1', 'state': 'absent', 'fstype': 'xfs'}) => { "ansible_loop_var": "mount_info", "backup_file": "", "boot": "yes", "changed": true, "dump": "0", "fstab": "/etc/fstab", "fstype": "xfs", "mount_info": { "fstype": "xfs", "path": "/opt/test1", "src": "UUID=38081a08-cf75-4f9b-b424-e901b7e9592f", "state": "absent" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "UUID=38081a08-cf75-4f9b-b424-e901b7e9592f" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:179 Saturday 28 March 2026 19:28:22 -0400 (0:00:01.058) 0:05:57.338 ******** ok: [managed-node12] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Set 
up new/current mounts] *********** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:184 Saturday 28 March 2026 19:28:23 -0400 (0:00:01.041) 0:05:58.380 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195 Saturday 28 March 2026 19:28:23 -0400 (0:00:00.136) 0:05:58.516 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:207 Saturday 28 March 2026 19:28:23 -0400 (0:00:00.137) 0:05:58.654 ******** ok: [managed-node12] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:215 Saturday 28 March 2026 19:28:24 -0400 (0:00:00.956) 0:05:59.610 ******** ok: [managed-node12] => { "changed": false, "stat": { "atime": 1774737762.6954868, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1774342704.475, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 4194435, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1774342323.629, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "1224473831", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:220 Saturday 28 March 2026 19:28:25 -0400 (0:00:00.664) 0:06:00.275 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:242 Saturday 28 March 2026 19:28:25 -0400 (0:00:00.047) 0:06:00.322 ******** ok: [managed-node12] TASK [Verify role results - 4] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:128 Saturday 28 March 2026 19:28:26 -0400 (0:00:01.174) 0:06:01.496 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node12 TASK [Print out pool information] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2 Saturday 28 March 2026 19:28:26 -0400 (0:00:00.155) 0:06:01.652 ******** ok: 
[managed-node12] => { "_storage_pools_list": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "stratis", "volumes": [ { "_device": "/dev/stratis/foo/test1", "_mount_id": "UUID=38081a08-cf75-4f9b-b424-e901b7e9592f", "_raw_device": "/dev/stratis/foo/test1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null }, { "_device": "/dev/stratis/foo/test2", "_mount_id": "UUID=e520ef01-29b1-479d-a588-3bb5ac0d55d9", "_raw_device": "/dev/stratis/foo/test2", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "mount_user": null, "name": "test2", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } ] } ] } TASK [Print out volume information] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7 Saturday 28 March 2026 19:28:26 -0400 (0:00:00.169) 0:06:01.822 ******** skipping: [managed-node12] => { "false_condition": "_storage_volumes_list | length > 0" } TASK [Collect info about the volumes.] 
***************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15 Saturday 28 March 2026 19:28:26 -0400 (0:00:00.147) 0:06:01.969 ******** ok: [managed-node12] => { "changed": false, "info": { "/dev/loop0": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/loop0", "size": "0B", "type": "loop", "uuid": "" }, "/dev/sda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdb": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda1", "size": "1M", "type": "partition", "uuid": "" }, "/dev/xvda2": { "fstype": "xfs", "label": "", "mountpoint": "/", "name": "/dev/xvda2", "size": "250G", "type": "partition", "uuid": "94fc577e-f1df-44bb-8e86-63b9a68f8f7f" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20 Saturday 28 March 2026 19:28:27 -0400 (0:00:00.713) 0:06:02.683 ******** ok: [managed-node12] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.002995", "end": "2026-03-28 19:28:28.164816", "rc": 0, "start": "2026-03-28 19:28:28.161821" } STDOUT: # system_role:storage # # /etc/fstab # Created by anaconda on Tue Mar 24 08:52:03 2026 # # Accessible filesystems, by reference, are maintained under '/dev/disk/'. # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info. # # After editing this file, run 'systemctl daemon-reload' to update systemd # units generated from this file. 
# UUID=94fc577e-f1df-44bb-8e86-63b9a68f8f7f / xfs defaults 0 0 ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25 Saturday 28 March 2026 19:28:28 -0400 (0:00:00.700) 0:06:03.383 ******** ok: [managed-node12] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.002954", "end": "2026-03-28 19:28:28.832003", "failed_when_result": false, "rc": 0, "start": "2026-03-28 19:28:28.829049" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34 Saturday 28 March 2026 19:28:28 -0400 (0:00:00.680) 0:06:04.064 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node12 => (item={'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'absent', 'type': 'stratis', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'part_type': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': 
'/dev/stratis/foo/test1', '_mount_id': 'UUID=38081a08-cf75-4f9b-b424-e901b7e9592f'}, {'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test2', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'part_type': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test2', '_raw_device': '/dev/stratis/foo/test2', '_mount_id': 'UUID=e520ef01-29b1-479d-a588-3bb5ac0d55d9'}]}) TASK [Set _storage_pool_tests] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5 Saturday 28 March 2026 19:28:29 -0400 (0:00:00.284) 0:06:04.349 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [Get VG shared value status] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18 Saturday 28 March 2026 19:28:29 -0400 (0:00:00.105) 0:06:04.454 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'", "skip_reason": "Conditional result was False" } TASK [Verify that VG shared value checks out] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24 Saturday 28 March 2026 19:28:29 -0400 (0:00:00.119) 0:06:04.574 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'", "skip_reason": "Conditional result was False" } TASK [Verify pool subset] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34 Saturday 28 March 2026 19:28:29 -0400 (0:00:00.112) 0:06:04.687 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node12 => (item=members) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node12 => (item=volumes) TASK [Set test variables] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2 Saturday 28 March 2026 19:28:29 -0400 (0:00:00.313) 0:06:05.000 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Get the 
canonical device path for each member device] ******************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8 Saturday 28 March 2026 19:28:30 -0400 (0:00:00.117) 0:06:05.118 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Set pvs lvm length] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17 Saturday 28 March 2026 19:28:30 -0400 (0:00:00.067) 0:06:05.186 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Set pool pvs] ************************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22 Saturday 28 March 2026 19:28:30 -0400 (0:00:00.055) 0:06:05.241 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Verify PV count] ********************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27 Saturday 28 March 2026 19:28:30 -0400 (0:00:00.072) 0:06:05.313 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Set expected pv type] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36 Saturday 28 March 2026 19:28:30 -0400 (0:00:00.095) 0:06:05.409 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Set expected pv type - 2] ************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41 Saturday 28 March 2026 19:28:30 -0400 (0:00:00.118) 0:06:05.527 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm' and not storage_test_pool.encryption", "skip_reason": "Conditional result was False" } TASK [Set expected pv type - 3] ************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46 Saturday 28 March 2026 19:28:30 -0400 (0:00:00.089) 0:06:05.616 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:55 Saturday 28 March 2026 19:28:30 -0400 (0:00:00.082) 0:06:05.700 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Check that blivet supports PV grow to fill] ****************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:68 
Saturday 28 March 2026 19:28:30 -0400 (0:00:00.077) 0:06:05.777 ******** ok: [managed-node12] => { "changed": false, "failed_when_result": false, "rc": 0 } STDERR: OpenSSH_9.9p1, OpenSSL 3.5.5 27 Jan 2026 debug1: Reading configuration data /root/.ssh/config debug1: Reading configuration data /etc/ssh/ssh_config debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug2: checking match for 'final all' host 10.31.11.27 originally 10.31.11.27 debug2: match not found debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config debug1: configuration requests final Match pass debug2: resolve_canonicalize: hostname 10.31.11.27 is address debug1: re-parsing configuration debug1: Reading configuration data /root/.ssh/config debug1: Reading configuration data /etc/ssh/ssh_config debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug2: checking match for 'final all' host 10.31.11.27 originally 10.31.11.27 debug2: match found debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config debug1: auto-mux: Trying existing master at '/root/.ansible/cp/e1141b92e1' debug2: fd 3 setting O_NONBLOCK debug2: mux_client_hello_exchange: master version 4 debug1: mux_client_request_session: master session id: 2 debug2: Received exit status from master 0 Shared connection to 10.31.11.27 closed. TASK [Verify that PVs fill the whole devices when they should] ***************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:78 Saturday 28 March 2026 19:28:31 -0400 (0:00:00.574) 0:06:06.352 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Check MD RAID] *********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:88 Saturday 28 March 2026 19:28:31 -0400 (0:00:00.143) 0:06:06.495 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node12 TASK [Get information about RAID] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8 Saturday 28 March 2026 19:28:31 -0400 (0:00:00.219) 0:06:06.715 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14 Saturday 28 March 2026 19:28:31 -0400 (0:00:00.105) 0:06:06.821 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19 Saturday 28 March 2026 19:28:31 -0400 (0:00:00.115) 0:06:06.936 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24 Saturday 28 March 2026 19:28:31 -0400 (0:00:00.108) 0:06:07.045 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set md chunk size regex] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29 Saturday 28 March 2026 19:28:32 -0400 (0:00:00.110) 0:06:07.155 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37 Saturday 28 March 2026 19:28:32 -0400 (0:00:00.126) 0:06:07.282 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46 Saturday 28 March 2026 19:28:32 -0400 (0:00:00.106) 0:06:07.389 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55 Saturday 28 March 2026 19:28:32 -0400 (0:00:00.100) 0:06:07.489 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64 Saturday 28 March 2026 19:28:32 -0400 (0:00:00.097) 0:06:07.587 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74 Saturday 28 March 2026 19:28:32 -0400 (0:00:00.164) 0:06:07.752 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83 Saturday 28 March 2026 19:28:32 -0400 (0:00:00.142) 0:06:07.898 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_chunk_size_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:91 Saturday 28 March 2026 19:28:32 -0400 (0:00:00.140) 0:06:08.038 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node12 TASK [Validate pool member LVM RAID settings] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2 Saturday 28 March 2026 19:28:33 -0400 (0:00:00.207) 0:06:08.247 ******** skipping: [managed-node12] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'part_type': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=38081a08-cf75-4f9b-b424-e901b7e9592f'}) => { "ansible_loop_var": "storage_test_lvmraid_volume", "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False", "storage_test_lvmraid_volume": { "_device": "/dev/stratis/foo/test1", "_mount_id": "UUID=38081a08-cf75-4f9b-b424-e901b7e9592f", "_raw_device": "/dev/stratis/foo/test1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } } skipping: [managed-node12] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test2', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 
'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'part_type': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test2', '_raw_device': '/dev/stratis/foo/test2', '_mount_id': 'UUID=e520ef01-29b1-479d-a588-3bb5ac0d55d9'}) => { "ansible_loop_var": "storage_test_lvmraid_volume", "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False", "storage_test_lvmraid_volume": { "_device": "/dev/stratis/foo/test2", "_mount_id": "UUID=e520ef01-29b1-479d-a588-3bb5ac0d55d9", "_raw_device": "/dev/stratis/foo/test2", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "mount_user": null, "name": "test2", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } } skipping: [managed-node12] => { "changed": false } MSG: All items skipped TASK [Check Thin Pools] ******************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:94 Saturday 28 March 2026 19:28:33 -0400 (0:00:00.148) 0:06:08.395 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node12 TASK [Validate pool member thinpool settings] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2 Saturday 28 March 2026 19:28:33 -0400 (0:00:00.249) 0:06:08.644 ******** skipping: [managed-node12] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'part_type': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 
'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=38081a08-cf75-4f9b-b424-e901b7e9592f'}) => { "ansible_loop_var": "storage_test_thin_volume", "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False", "storage_test_thin_volume": { "_device": "/dev/stratis/foo/test1", "_mount_id": "UUID=38081a08-cf75-4f9b-b424-e901b7e9592f", "_raw_device": "/dev/stratis/foo/test1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } } skipping: [managed-node12] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test2', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'part_type': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test2', '_raw_device': '/dev/stratis/foo/test2', '_mount_id': 'UUID=e520ef01-29b1-479d-a588-3bb5ac0d55d9'}) => { "ansible_loop_var": "storage_test_thin_volume", "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False", "storage_test_thin_volume": { "_device": "/dev/stratis/foo/test2", "_mount_id": "UUID=e520ef01-29b1-479d-a588-3bb5ac0d55d9", "_raw_device": "/dev/stratis/foo/test2", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": 
null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "mount_user": null, "name": "test2", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } } skipping: [managed-node12] => { "changed": false } MSG: All items skipped TASK [Check member encryption] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:97 Saturday 28 March 2026 19:28:33 -0400 (0:00:00.229) 0:06:08.874 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node12 TASK [Set test variables] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5 Saturday 28 March 2026 19:28:34 -0400 (0:00:00.328) 0:06:09.202 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10 Saturday 28 March 2026 19:28:34 -0400 (0:00:00.089) 0:06:09.291 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Validate pool member crypttab entries] *********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17 Saturday 28 March 2026 19:28:34 -0400 (0:00:00.053) 0:06:09.345 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24 Saturday 28 March 2026 19:28:34 -0400 (0:00:00.035) 0:06:09.380 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:100 Saturday 28 March 2026 19:28:34 -0400 (0:00:00.098) 0:06:09.478 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node12 TASK [Validate pool member VDO settings] *************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2 Saturday 28 March 2026 19:28:34 -0400 (0:00:00.185) 0:06:09.663 ******** skipping: [managed-node12] => (item={'encryption': False, 
'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'part_type': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=38081a08-cf75-4f9b-b424-e901b7e9592f'}) => { "ansible_loop_var": "storage_test_vdo_volume", "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False", "storage_test_vdo_volume": { "_device": "/dev/stratis/foo/test1", "_mount_id": "UUID=38081a08-cf75-4f9b-b424-e901b7e9592f", "_raw_device": "/dev/stratis/foo/test1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } } skipping: [managed-node12] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test2', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'part_type': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test2', '_raw_device': '/dev/stratis/foo/test2', '_mount_id': 
'UUID=e520ef01-29b1-479d-a588-3bb5ac0d55d9'}) => { "ansible_loop_var": "storage_test_vdo_volume", "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False", "storage_test_vdo_volume": { "_device": "/dev/stratis/foo/test2", "_mount_id": "UUID=e520ef01-29b1-479d-a588-3bb5ac0d55d9", "_raw_device": "/dev/stratis/foo/test2", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test2", "mount_user": null, "name": "test2", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } } skipping: [managed-node12] => { "changed": false } MSG: All items skipped TASK [Check Stratis] *********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:103 Saturday 28 March 2026 19:28:34 -0400 (0:00:00.225) 0:06:09.889 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node12 TASK [Get stratis pool information] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6 Saturday 28 March 2026 19:28:35 -0400 (0:00:00.430) 0:06:10.319 ******** ok: [managed-node12] => { "changed": false, "rc": 0 } STDOUT: null STDERR: OpenSSH_9.9p1, OpenSSL 3.5.5 27 Jan 2026 debug1: Reading configuration data /root/.ssh/config debug1: Reading configuration data /etc/ssh/ssh_config debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug2: checking match for 'final all' host 10.31.11.27 originally 10.31.11.27 debug2: match not found debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config debug1: configuration requests final Match pass debug2: resolve_canonicalize: hostname 10.31.11.27 is address debug1: re-parsing configuration debug1: Reading configuration data /root/.ssh/config debug1: Reading configuration data /etc/ssh/ssh_config debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug2: checking match for 'final all' host 10.31.11.27 originally 10.31.11.27 debug2: match found debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config debug1: auto-mux: Trying existing master at '/root/.ansible/cp/e1141b92e1' debug2: fd 3 setting O_NONBLOCK debug2: mux_client_hello_exchange: master version 4 debug1: mux_client_request_session: master session id: 2 debug2: Received exit status from master 0 Shared connection to 10.31.11.27 closed. 
TASK [Print script output] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15 Saturday 28 March 2026 19:28:36 -0400 (0:00:00.799) 0:06:11.119 ******** ok: [managed-node12] => {} MSG: null TASK [Get information about Stratis] ******************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:19 Saturday 28 March 2026 19:28:36 -0400 (0:00:00.161) 0:06:11.280 ******** ok: [managed-node12] => { "ansible_facts": { "_stratis_pool_info": "" }, "changed": false } TASK [Verify that the pools was created] *************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:23 Saturday 28 March 2026 19:28:36 -0400 (0:00:00.183) 0:06:11.464 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.state == 'present'", "skip_reason": "Conditional result was False" } TASK [Verify that encryption is correctly set] ********************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:30 Saturday 28 March 2026 19:28:36 -0400 (0:00:00.168) 0:06:11.644 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.state == 'present'", "skip_reason": "Conditional result was False" } TASK [Verify that Clevis/Tang encryption is correctly set] ********************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:39 Saturday 28 March 2026 19:28:36 -0400 (0:00:00.164) 0:06:11.808 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.state == 'present'", "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:49 Saturday 28 March 2026 19:28:36 -0400 (0:00:00.098) 0:06:11.907 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_stratis_report": null }, "changed": false } TASK [Clean up test variables] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:106 Saturday 28 March 2026 19:28:36 -0400 (0:00:00.124) 0:06:12.031 ******** ok: [managed-node12] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Verify the volumes] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3 Saturday 28 March 2026 19:28:37 -0400 (0:00:00.094) 0:06:12.126 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node12 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 
'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'part_type': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=38081a08-cf75-4f9b-b424-e901b7e9592f'}) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node12 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test2', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test2', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'part_type': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test2', '_raw_device': '/dev/stratis/foo/test2', '_mount_id': 'UUID=e520ef01-29b1-479d-a588-3bb5ac0d55d9'}) TASK [Set storage volume test variables] *************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2 Saturday 28 March 2026 19:28:37 -0400 (0:00:00.680) 0:06:12.807 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_volume_present": false, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [Run test verify for storage_test_volume_subset] ************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19 Saturday 28 March 2026 19:28:37 -0400 (0:00:00.176) 0:06:12.984 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node12 => (item=mount) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node12 => (item=fstab) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node12 => (item=fs) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node12 => (item=device) included: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node12 => (item=encryption) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node12 => (item=md) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node12 => (item=size) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node12 => (item=cache) TASK [Get expected mount device based on device type] ************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7 Saturday 28 March 2026 19:28:38 -0400 (0:00:01.041) 0:06:14.025 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_device_path": "/dev/stratis/foo/test1" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11 Saturday 28 March 2026 19:28:39 -0400 (0:00:00.233) 0:06:14.259 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_mount_expected_mount_point": "/opt/test1", "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Get information about the mountpoint directory] ************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19 Saturday 28 March 2026 19:28:39 -0400 (0:00:00.218) 0:06:14.478 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present | bool", "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by device] ******************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:32 Saturday 28 March 2026 19:28:39 -0400 (0:00:00.122) 0:06:14.601 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Verify mount directory user] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:40 Saturday 28 March 2026 19:28:39 -0400 (0:00:00.115) 0:06:14.717 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Verify mount directory group] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:51 Saturday 28 March 2026 19:28:39 -0400 (0:00:00.141) 0:06:14.858 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Verify mount directory permissions] ************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:62 Saturday 28 March 2026 19:28:39 -0400 (0:00:00.145) 0:06:15.004 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present", 
"skip_reason": "Conditional result was False" } TASK [Get path of test volume device] ****************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:76 Saturday 28 March 2026 19:28:40 -0400 (0:00:00.138) 0:06:15.142 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:82 Saturday 28 March 2026 19:28:40 -0400 (0:00:00.139) 0:06:15.282 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:88 Saturday 28 March 2026 19:28:40 -0400 (0:00:00.139) 0:06:15.422 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Unset facts] ************************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:98 Saturday 28 March 2026 19:28:40 -0400 (0:00:00.121) 0:06:15.543 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_found_mount_stat": null, "storage_test_mount_expected_mount_point": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2 Saturday 28 March 2026 19:28:40 -0400 (0:00:00.154) 0:06:15.698 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "0", "storage_test_fstab_expected_mount_options_matches": "0", "storage_test_fstab_expected_mount_point_matches": "0", "storage_test_fstab_id_matches": [], "storage_test_fstab_mount_options_matches": [], "storage_test_fstab_mount_point_matches": [] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17 Saturday 28 March 2026 19:28:41 -0400 (0:00:00.425) 0:06:16.123 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present | bool", "skip_reason": "Conditional result was False" } TASK [Verify the fstab mount point] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24 Saturday 28 March 2026 19:28:41 -0400 (0:00:00.147) 0:06:16.270 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33 Saturday 28 March 2026 19:28:41 
-0400 (0:00:00.202) 0:06:16.472 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "__storage_verify_mount_options | d(false)", "skip_reason": "Conditional result was False" } TASK [Verify fingerprint] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45 Saturday 28 March 2026 19:28:41 -0400 (0:00:00.147) 0:06:16.620 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Clean up variables] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52 Saturday 28 March 2026 19:28:41 -0400 (0:00:00.170) 0:06:16.790 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6 Saturday 28 March 2026 19:28:41 -0400 (0:00:00.144) 0:06:16.934 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type != \"stratis\"", "skip_reason": "Conditional result was False" } TASK [Verify fs label] ********************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14 Saturday 28 March 2026 19:28:41 -0400 (0:00:00.162) 0:06:17.097 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type != \"stratis\"", "skip_reason": "Conditional result was False" } TASK [See whether the device node is present] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3 Saturday 28 March 2026 19:28:42 -0400 (0:00:00.112) 0:06:17.210 ******** ok: [managed-node12] => { "changed": false, "stat": { "exists": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9 Saturday 28 March 2026 19:28:42 -0400 (0:00:00.807) 0:06:18.017 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present or storage_test_volume.type == 'disk'", "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the device node - 2] ********************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16 Saturday 28 March 2026 19:28:43 -0400 (0:00:00.127) 0:06:18.144 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about this volume] ********************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23 Saturday 28 March 2026 19:28:43 -0400 (0:00:00.174) 0:06:18.318 ******** skipping: [managed-node12] => { 
"changed": false, "false_condition": "_storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Process volume type (set initial value) (1/2)] *************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29 Saturday 28 March 2026 19:28:43 -0400 (0:00:00.112) 0:06:18.431 ******** ok: [managed-node12] => { "ansible_facts": { "st_volume_type": "stratis" }, "changed": false } TASK [Process volume type (get RAID value) (2/2)] ****************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33 Saturday 28 March 2026 19:28:43 -0400 (0:00:00.168) 0:06:18.600 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == \"raid\"", "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38 Saturday 28 March 2026 19:28:43 -0400 (0:00:00.148) 0:06:18.748 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Stat the LUKS device, if encrypted] ************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3 Saturday 28 March 2026 19:28:43 -0400 (0:00:00.132) 0:06:18.880 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10 Saturday 28 March 2026 19:28:43 -0400 (0:00:00.165) 0:06:19.046 ******** ok: [managed-node12] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: cryptsetup TASK [Collect LUKS info for this volume] *************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16 Saturday 28 March 2026 19:28:45 -0400 (0:00:01.172) 0:06:20.218 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.encryption and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22 Saturday 28 March 2026 19:28:45 -0400 (0:00:00.128) 0:06:20.347 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29 Saturday 28 March 2026 19:28:45 -0400 (0:00:00.129) 0:06:20.476 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Make sure we got info about the 
LUKS volume if encrypted] **************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40 Saturday 28 March 2026 19:28:45 -0400 (0:00:00.107) 0:06:20.584 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46 Saturday 28 March 2026 19:28:45 -0400 (0:00:00.098) 0:06:20.682 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51 Saturday 28 March 2026 19:28:45 -0400 (0:00:00.094) 0:06:20.777 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:64 Saturday 28 March 2026 19:28:45 -0400 (0:00:00.101) 0:06:20.878 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:77 Saturday 28 March 2026 19:28:45 -0400 (0:00:00.110) 0:06:20.989 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Set test variables] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:90 Saturday 28 March 2026 19:28:45 -0400 (0:00:00.108) 0:06:21.097 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:96 Saturday 28 March 2026 19:28:46 -0400 (0:00:00.179) 0:06:21.277 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:103 Saturday 28 March 2026 19:28:46 -0400 (0:00:00.110) 0:06:21.388 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] 
********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:111 Saturday 28 March 2026 19:28:46 -0400 (0:00:00.076) 0:06:21.464 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:119 Saturday 28 March 2026 19:28:46 -0400 (0:00:00.062) 0:06:21.527 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:127 Saturday 28 March 2026 19:28:46 -0400 (0:00:00.087) 0:06:21.614 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [Get information about RAID] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8 Saturday 28 March 2026 19:28:46 -0400 (0:00:00.130) 0:06:21.744 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14 Saturday 28 March 2026 19:28:46 -0400 (0:00:00.139) 0:06:21.884 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19 Saturday 28 March 2026 19:28:46 -0400 (0:00:00.109) 0:06:21.994 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24 Saturday 28 March 2026 19:28:46 -0400 (0:00:00.105) 0:06:22.099 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set chunk size regex] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29 Saturday 28 March 2026 19:28:47 -0400 (0:00:00.101) 0:06:22.201 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] 
**************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37 Saturday 28 March 2026 19:28:47 -0400 (0:00:00.095) 0:06:22.296 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46 Saturday 28 March 2026 19:28:47 -0400 (0:00:00.113) 0:06:22.410 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54 Saturday 28 March 2026 19:28:47 -0400 (0:00:00.085) 0:06:22.496 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62 Saturday 28 March 2026 19:28:47 -0400 (0:00:00.074) 0:06:22.570 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70 Saturday 28 March 2026 19:28:47 -0400 (0:00:00.083) 0:06:22.654 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Parse the actual size of the volume] ************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3 Saturday 28 March 2026 19:28:47 -0400 (0:00:00.061) 0:06:22.716 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present | bool", "skip_reason": "Conditional result was False" } TASK [Parse the requested size of the volume] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11 Saturday 28 March 2026 19:28:47 -0400 (0:00:00.070) 0:06:22.786 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present | bool", "skip_reason": "Conditional result was False" } TASK [Establish base value for expected size] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20 Saturday 28 March 2026 19:28:47 -0400 (0:00:00.117) 0:06:22.903 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present | bool", "skip_reason": "Conditional result was False" } TASK [Show expected size] ****************************************************** task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28 Saturday 28 March 2026 19:28:47 -0400 (0:00:00.085) 0:06:22.989 ******** ok: [managed-node12] => { "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined" } TASK [Get the size of parent/pool device] ************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32 Saturday 28 March 2026 19:28:47 -0400 (0:00:00.106) 0:06:23.096 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present | bool", "skip_reason": "Conditional result was False" } TASK [Show test pool] ********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46 Saturday 28 March 2026 19:28:48 -0400 (0:00:00.091) 0:06:23.187 ******** skipping: [managed-node12] => { "false_condition": "_storage_test_volume_present | bool" } TASK [Show test blockinfo] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50 Saturday 28 March 2026 19:28:48 -0400 (0:00:00.097) 0:06:23.284 ******** skipping: [managed-node12] => { "false_condition": "_storage_test_volume_present | bool" } TASK [Show test pool size] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54 Saturday 28 March 2026 19:28:48 -0400 (0:00:00.056) 0:06:23.341 ******** skipping: [managed-node12] => { "false_condition": "_storage_test_volume_present | bool" } TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58 Saturday 28 March 2026 19:28:48 -0400 (0:00:00.058) 0:06:23.399 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present | bool", "skip_reason": "Conditional result was False" } TASK [Default thin pool reserved space values] ********************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:68 Saturday 28 March 2026 19:28:48 -0400 (0:00:00.070) 0:06:23.470 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Default minimal thin pool reserved space size] *************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:72 Saturday 28 March 2026 19:28:48 -0400 (0:00:00.073) 0:06:23.544 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Default maximal thin pool reserved space size] *************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:77 Saturday 28 March 2026 19:28:48 -0400 (0:00:00.114) 0:06:23.658 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional 
result was False" } TASK [Calculate maximum usable space in thin pool] ***************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:83 Saturday 28 March 2026 19:28:48 -0400 (0:00:00.128) 0:06:23.787 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Apply upper size limit to max usable thin pool space] ******************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:87 Saturday 28 March 2026 19:28:48 -0400 (0:00:00.117) 0:06:23.904 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Apply lower size limit to max usable thin pool space] ******************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:92 Saturday 28 March 2026 19:28:48 -0400 (0:00:00.092) 0:06:23.996 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Convert maximum usable thin pool space from int to Size] ***************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:97 Saturday 28 March 2026 19:28:48 -0400 (0:00:00.092) 0:06:24.088 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Show max thin pool size] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:102 Saturday 28 March 2026 19:28:49 -0400 (0:00:00.081) 0:06:24.170 ******** skipping: [managed-node12] => { "false_condition": "storage_test_volume.thin | bool" } TASK [Show volume thin pool size] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:106 Saturday 28 March 2026 19:28:49 -0400 (0:00:00.087) 0:06:24.257 ******** skipping: [managed-node12] => { "false_condition": "storage_test_volume.thin | bool" } TASK [Show test volume size] *************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:110 Saturday 28 March 2026 19:28:49 -0400 (0:00:00.098) 0:06:24.355 ******** skipping: [managed-node12] => { "false_condition": "storage_test_volume.thin | bool" } TASK [Establish base value for expected thin pool size] ************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:114 Saturday 28 March 2026 19:28:49 -0400 (0:00:00.098) 0:06:24.454 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Calculate the expected size based on pool size and percentage value - 2] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:121 Saturday 28 March 2026 19:28:49 -0400 (0:00:00.123) 0:06:24.578 ******** skipping: 
[managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Establish base value for expected thin pool volume size] ***************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:128 Saturday 28 March 2026 19:28:49 -0400 (0:00:00.096) 0:06:24.675 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Calculate the expected thin pool volume size based on percentage value] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:132 Saturday 28 March 2026 19:28:49 -0400 (0:00:00.067) 0:06:24.742 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Replace expected volume size with calculated value] ********************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:138 Saturday 28 March 2026 19:28:49 -0400 (0:00:00.115) 0:06:24.858 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Show actual size] ******************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:144 Saturday 28 March 2026 19:28:49 -0400 (0:00:00.109) 0:06:24.967 ******** ok: [managed-node12] => { "storage_test_actual_size": { "changed": false, "false_condition": "_storage_test_volume_present | bool", "skip_reason": "Conditional result was False", "skipped": true } } TASK [Show expected size - 2] ************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:148 Saturday 28 March 2026 19:28:49 -0400 (0:00:00.094) 0:06:25.062 ******** ok: [managed-node12] => { "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined" } TASK [Assert expected size is actual size] ************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:152 Saturday 28 March 2026 19:28:50 -0400 (0:00:00.123) 0:06:25.186 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present | bool", "skip_reason": "Conditional result was False" } TASK [Get information about the LV] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5 Saturday 28 March 2026 19:28:50 -0400 (0:00:00.083) 0:06:25.269 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13 Saturday 28 March 2026 19:28:50 -0400 (0:00:00.079) 0:06:25.349 ******** skipping: [managed-node12] => { "changed": false, 
"false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Check segment type] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17 Saturday 28 March 2026 19:28:50 -0400 (0:00:00.078) 0:06:25.427 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Set LV cache size] ******************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24 Saturday 28 March 2026 19:28:50 -0400 (0:00:00.049) 0:06:25.477 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Parse the requested cache size] ****************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31 Saturday 28 March 2026 19:28:50 -0400 (0:00:00.089) 0:06:25.567 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Set expected cache size] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37 Saturday 28 March 2026 19:28:50 -0400 (0:00:00.099) 0:06:25.666 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42 Saturday 28 March 2026 19:28:50 -0400 (0:00:00.099) 0:06:25.765 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25 Saturday 28 March 2026 19:28:50 -0400 (0:00:00.084) 0:06:25.850 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Set storage volume test variables] *************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2 Saturday 28 March 2026 19:28:50 -0400 (0:00:00.096) 0:06:25.947 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_volume_present": false, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [Run test verify for storage_test_volume_subset] ************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19 Saturday 28 March 2026 19:28:50 -0400 
(0:00:00.123) 0:06:26.070 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node12 => (item=mount) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node12 => (item=fstab) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node12 => (item=fs) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node12 => (item=device) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node12 => (item=encryption) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node12 => (item=md) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node12 => (item=size) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node12 => (item=cache) TASK [Get expected mount device based on device type] ************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7 Saturday 28 March 2026 19:28:51 -0400 (0:00:00.591) 0:06:26.661 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_device_path": "/dev/stratis/foo/test2" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11 Saturday 28 March 2026 19:28:51 -0400 (0:00:00.140) 0:06:26.801 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_mount_expected_mount_point": "/opt/test2", "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Get information about the mountpoint directory] ************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19 Saturday 28 March 2026 19:28:51 -0400 (0:00:00.129) 0:06:26.934 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present | bool", "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by device] ******************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:32 Saturday 28 March 2026 19:28:51 -0400 (0:00:00.057) 0:06:26.991 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Verify mount directory user] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:40 Saturday 28 March 2026 19:28:51 -0400 (0:00:00.069) 0:06:27.060 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Verify mount directory group] ******************************************** task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:51 Saturday 28 March 2026 19:28:52 -0400 (0:00:00.078) 0:06:27.138 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Verify mount directory permissions] ************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:62 Saturday 28 March 2026 19:28:52 -0400 (0:00:00.074) 0:06:27.213 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Get path of test volume device] ****************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:76 Saturday 28 March 2026 19:28:52 -0400 (0:00:00.074) 0:06:27.288 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:82 Saturday 28 March 2026 19:28:52 -0400 (0:00:00.070) 0:06:27.359 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:88 Saturday 28 March 2026 19:28:52 -0400 (0:00:00.110) 0:06:27.469 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Unset facts] ************************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:98 Saturday 28 March 2026 19:28:52 -0400 (0:00:00.088) 0:06:27.558 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_found_mount_stat": null, "storage_test_mount_expected_mount_point": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2 Saturday 28 March 2026 19:28:52 -0400 (0:00:00.094) 0:06:27.652 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "0", "storage_test_fstab_expected_mount_options_matches": "0", "storage_test_fstab_expected_mount_point_matches": "0", "storage_test_fstab_id_matches": [], "storage_test_fstab_mount_options_matches": [], "storage_test_fstab_mount_point_matches": [] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17 Saturday 28 March 2026 19:28:52 -0400 (0:00:00.194) 0:06:27.847 ******** skipping: [managed-node12] => 
{ "changed": false, "false_condition": "_storage_test_volume_present | bool", "skip_reason": "Conditional result was False" } TASK [Verify the fstab mount point] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24 Saturday 28 March 2026 19:28:52 -0400 (0:00:00.069) 0:06:27.916 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33 Saturday 28 March 2026 19:28:52 -0400 (0:00:00.077) 0:06:27.994 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "__storage_verify_mount_options | d(false)", "skip_reason": "Conditional result was False" } TASK [Verify fingerprint] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45 Saturday 28 March 2026 19:28:52 -0400 (0:00:00.061) 0:06:28.055 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Clean up variables] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52 Saturday 28 March 2026 19:28:53 -0400 (0:00:00.105) 0:06:28.160 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6 Saturday 28 March 2026 19:28:53 -0400 (0:00:00.062) 0:06:28.223 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type != \"stratis\"", "skip_reason": "Conditional result was False" } TASK [Verify fs label] ********************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14 Saturday 28 March 2026 19:28:53 -0400 (0:00:00.060) 0:06:28.284 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type != \"stratis\"", "skip_reason": "Conditional result was False" } TASK [See whether the device node is present] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3 Saturday 28 March 2026 19:28:53 -0400 (0:00:00.067) 0:06:28.351 ******** ok: [managed-node12] => { "changed": false, "stat": { "exists": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9 Saturday 28 March 2026 19:28:53 -0400 (0:00:00.552) 0:06:28.904 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present or storage_test_volume.type == 'disk'", 
"skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the device node - 2] ********************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16 Saturday 28 March 2026 19:28:53 -0400 (0:00:00.086) 0:06:28.991 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about this volume] ********************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23 Saturday 28 March 2026 19:28:53 -0400 (0:00:00.100) 0:06:29.091 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Process volume type (set initial value) (1/2)] *************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29 Saturday 28 March 2026 19:28:54 -0400 (0:00:00.082) 0:06:29.174 ******** ok: [managed-node12] => { "ansible_facts": { "st_volume_type": "stratis" }, "changed": false } TASK [Process volume type (get RAID value) (2/2)] ****************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33 Saturday 28 March 2026 19:28:54 -0400 (0:00:00.086) 0:06:29.260 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == \"raid\"", "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38 Saturday 28 March 2026 19:28:54 -0400 (0:00:00.067) 0:06:29.327 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Stat the LUKS device, if encrypted] ************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3 Saturday 28 March 2026 19:28:54 -0400 (0:00:00.074) 0:06:29.402 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10 Saturday 28 March 2026 19:28:54 -0400 (0:00:00.093) 0:06:29.496 ******** ok: [managed-node12] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: cryptsetup TASK [Collect LUKS info for this volume] *************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16 Saturday 28 March 2026 19:28:55 -0400 (0:00:00.990) 0:06:30.486 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.encryption and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22 Saturday 28 March 2026 19:28:55 -0400 (0:00:00.070) 0:06:30.557 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29 Saturday 28 March 2026 19:28:55 -0400 (0:00:00.054) 0:06:30.613 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40 Saturday 28 March 2026 19:28:55 -0400 (0:00:00.069) 0:06:30.682 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46 Saturday 28 March 2026 19:28:55 -0400 (0:00:00.060) 0:06:30.742 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51 Saturday 28 March 2026 19:28:55 -0400 (0:00:00.057) 0:06:30.800 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:64 Saturday 28 March 2026 19:28:55 -0400 (0:00:00.161) 0:06:30.962 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:77 Saturday 28 March 2026 19:28:55 -0400 (0:00:00.059) 0:06:31.021 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Set test variables] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:90 Saturday 28 March 2026 19:28:55 -0400 (0:00:00.054) 0:06:31.076 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] 
******************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:96 Saturday 28 March 2026 19:28:56 -0400 (0:00:00.114) 0:06:31.190 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:103 Saturday 28 March 2026 19:28:56 -0400 (0:00:00.072) 0:06:31.263 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:111 Saturday 28 March 2026 19:28:56 -0400 (0:00:00.044) 0:06:31.307 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:119 Saturday 28 March 2026 19:28:56 -0400 (0:00:00.049) 0:06:31.356 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:127 Saturday 28 March 2026 19:28:56 -0400 (0:00:00.050) 0:06:31.406 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [Get information about RAID] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8 Saturday 28 March 2026 19:28:56 -0400 (0:00:00.068) 0:06:31.474 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14 Saturday 28 March 2026 19:28:56 -0400 (0:00:00.076) 0:06:31.550 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19 Saturday 28 March 2026 19:28:56 -0400 (0:00:00.058) 0:06:31.609 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24 Saturday 28 March 2026 19:28:56 -0400 (0:00:00.049) 0:06:31.659 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set chunk size regex] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29 Saturday 28 March 2026 19:28:56 -0400 (0:00:00.062) 0:06:31.722 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37 Saturday 28 March 2026 19:28:56 -0400 (0:00:00.046) 0:06:31.769 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46 Saturday 28 March 2026 19:28:56 -0400 (0:00:00.042) 0:06:31.812 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54 Saturday 28 March 2026 19:28:56 -0400 (0:00:00.030) 0:06:31.843 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62 Saturday 28 March 2026 19:28:56 -0400 (0:00:00.044) 0:06:31.888 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70 Saturday 28 March 2026 19:28:56 -0400 (0:00:00.030) 0:06:31.918 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Parse the actual size of the volume] ************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3 Saturday 28 March 2026 19:28:56 -0400 (0:00:00.045) 0:06:31.964 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present | bool", "skip_reason": "Conditional result was False" } TASK [Parse the requested size of the volume] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11 
Saturday 28 March 2026 19:28:56 -0400 (0:00:00.048) 0:06:32.012 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present | bool", "skip_reason": "Conditional result was False" } TASK [Establish base value for expected size] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20 Saturday 28 March 2026 19:28:56 -0400 (0:00:00.042) 0:06:32.055 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present | bool", "skip_reason": "Conditional result was False" } TASK [Show expected size] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28 Saturday 28 March 2026 19:28:57 -0400 (0:00:00.044) 0:06:32.100 ******** ok: [managed-node12] => { "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined" } TASK [Get the size of parent/pool device] ************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32 Saturday 28 March 2026 19:28:57 -0400 (0:00:00.049) 0:06:32.149 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present | bool", "skip_reason": "Conditional result was False" } TASK [Show test pool] ********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46 Saturday 28 March 2026 19:28:57 -0400 (0:00:00.041) 0:06:32.191 ******** skipping: [managed-node12] => { "false_condition": "_storage_test_volume_present | bool" } TASK [Show test blockinfo] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50 Saturday 28 March 2026 19:28:57 -0400 (0:00:00.045) 0:06:32.237 ******** skipping: [managed-node12] => { "false_condition": "_storage_test_volume_present | bool" } TASK [Show test pool size] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54 Saturday 28 March 2026 19:28:57 -0400 (0:00:00.042) 0:06:32.279 ******** skipping: [managed-node12] => { "false_condition": "_storage_test_volume_present | bool" } TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58 Saturday 28 March 2026 19:28:57 -0400 (0:00:00.037) 0:06:32.316 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present | bool", "skip_reason": "Conditional result was False" } TASK [Default thin pool reserved space values] ********************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:68 Saturday 28 March 2026 19:28:57 -0400 (0:00:00.028) 0:06:32.345 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Default minimal thin pool reserved space size] *************************** task 
path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:72 Saturday 28 March 2026 19:28:57 -0400 (0:00:00.049) 0:06:32.395 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Default maximal thin pool reserved space size] *************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:77 Saturday 28 March 2026 19:28:57 -0400 (0:00:00.078) 0:06:32.473 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Calculate maximum usable space in thin pool] ***************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:83 Saturday 28 March 2026 19:28:57 -0400 (0:00:00.089) 0:06:32.562 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Apply upper size limit to max usable thin pool space] ******************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:87 Saturday 28 March 2026 19:28:57 -0400 (0:00:00.076) 0:06:32.639 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Apply lower size limit to max usable thin pool space] ******************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:92 Saturday 28 March 2026 19:28:57 -0400 (0:00:00.070) 0:06:32.710 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Convert maximum usable thin pool space from int to Size] ***************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:97 Saturday 28 March 2026 19:28:57 -0400 (0:00:00.052) 0:06:32.762 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Show max thin pool size] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:102 Saturday 28 March 2026 19:28:57 -0400 (0:00:00.070) 0:06:32.833 ******** skipping: [managed-node12] => { "false_condition": "storage_test_volume.thin | bool" } TASK [Show volume thin pool size] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:106 Saturday 28 March 2026 19:28:57 -0400 (0:00:00.062) 0:06:32.895 ******** skipping: [managed-node12] => { "false_condition": "storage_test_volume.thin | bool" } TASK [Show test volume size] *************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:110 Saturday 28 March 2026 19:28:57 -0400 (0:00:00.091) 0:06:32.987 ******** skipping: [managed-node12] => { "false_condition": 
"storage_test_volume.thin | bool" } TASK [Establish base value for expected thin pool size] ************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:114 Saturday 28 March 2026 19:28:57 -0400 (0:00:00.081) 0:06:33.069 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Calculate the expected size based on pool size and percentage value - 2] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:121 Saturday 28 March 2026 19:28:58 -0400 (0:00:00.063) 0:06:33.132 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Establish base value for expected thin pool volume size] ***************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:128 Saturday 28 March 2026 19:28:58 -0400 (0:00:00.048) 0:06:33.180 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Calculate the expected thin pool volume size based on percentage value] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:132 Saturday 28 March 2026 19:28:58 -0400 (0:00:00.044) 0:06:33.224 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Replace expected volume size with calculated value] ********************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:138 Saturday 28 March 2026 19:28:58 -0400 (0:00:00.055) 0:06:33.280 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Show actual size] ******************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:144 Saturday 28 March 2026 19:28:58 -0400 (0:00:00.053) 0:06:33.333 ******** ok: [managed-node12] => { "storage_test_actual_size": { "changed": false, "false_condition": "_storage_test_volume_present | bool", "skip_reason": "Conditional result was False", "skipped": true } } TASK [Show expected size - 2] ************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:148 Saturday 28 March 2026 19:28:58 -0400 (0:00:00.065) 0:06:33.398 ******** ok: [managed-node12] => { "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined" } TASK [Assert expected size is actual size] ************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:152 Saturday 28 March 2026 19:28:58 -0400 (0:00:00.051) 0:06:33.450 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present | bool", "skip_reason": "Conditional result was False" } TASK [Get information about the LV] 
******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5 Saturday 28 March 2026 19:28:58 -0400 (0:00:00.044) 0:06:33.494 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13 Saturday 28 March 2026 19:28:58 -0400 (0:00:00.029) 0:06:33.524 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Check segment type] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17 Saturday 28 March 2026 19:28:58 -0400 (0:00:00.042) 0:06:33.566 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Set LV cache size] ******************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24 Saturday 28 March 2026 19:28:58 -0400 (0:00:00.068) 0:06:33.635 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Parse the requested cache size] ****************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31 Saturday 28 March 2026 19:28:58 -0400 (0:00:00.069) 0:06:33.705 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Set expected cache size] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37 Saturday 28 March 2026 19:28:58 -0400 (0:00:00.072) 0:06:33.778 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42 Saturday 28 March 2026 19:28:58 -0400 (0:00:00.055) 0:06:33.834 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25 Saturday 28 March 2026 19:28:58 -0400 (0:00:00.058) 0:06:33.892 ******** ok: [managed-node12] => { "ansible_facts": { 
"_storage_test_volume_present": null }, "changed": false } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:43 Saturday 28 March 2026 19:28:58 -0400 (0:00:00.071) 0:06:33.963 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Clean up variable namespace] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:52 Saturday 28 March 2026 19:28:58 -0400 (0:00:00.053) 0:06:34.017 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Create encrypted Stratis pool] ******************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:137 Saturday 28 March 2026 19:28:58 -0400 (0:00:00.056) 0:06:34.074 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml for managed-node12 TASK [Clear facts] ************************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml:10 Saturday 28 March 2026 19:28:59 -0400 (0:00:00.111) 0:06:34.185 ******** META: facts cleared TASK [Run the role] ************************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml:24 Saturday 28 March 2026 19:28:59 -0400 (0:00:00.016) 0:06:34.202 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "__sr_failed_when is defined", "skip_reason": "Conditional result was False" } TASK [Run the role normally] *************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml:34 Saturday 28 March 2026 19:28:59 -0400 (0:00:00.038) 0:06:34.240 ******** included: fedora.linux_system_roles.storage for managed-node12 TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Saturday 28 March 2026 19:28:59 -0400 (0:00:00.109) 0:06:34.350 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node12 TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Saturday 28 March 2026 19:28:59 -0400 (0:00:00.077) 0:06:34.428 ******** ok: [managed-node12] TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Saturday 28 March 2026 19:29:00 -0400 (0:00:01.090) 0:06:35.518 ******** skipping: [managed-node12] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node12] => 
(item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node12] => (item=CentOS_10.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-fs", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "xfsprogs", "stratisd", "stratis-cli", "{{ 'libblockdev-s390' if ansible_facts['architecture'] == 's390x' else 'libblockdev' }}", "vdo" ] }, "ansible_included_var_files": [ "/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node12] => (item=CentOS_10.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-fs", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "xfsprogs", "stratisd", "stratis-cli", "{{ 'libblockdev-s390' if ansible_facts['architecture'] == 's390x' else 'libblockdev' }}", "vdo" ] }, "ansible_included_var_files": [ "/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Saturday 28 March 2026 19:29:00 -0400 (0:00:00.105) 0:06:35.624 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "not __storage_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Saturday 28 March 2026 19:29:00 -0400 (0:00:00.032) 0:06:35.656 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "not __storage_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Saturday 28 March 2026 19:29:00 -0400 (0:00:00.030) 0:06:35.687 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Saturday 28 March 2026 19:29:00 -0400 (0:00:00.034) 0:06:35.722 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Saturday 28 March 2026 19:29:00 -0400 (0:00:00.025) 0:06:35.747 ******** redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node12
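
The storage_pools value printed by the "Show storage_pools" task below is what drives this part of the run: an encrypted Stratis pool named foo over nine disks with a single 4g filesystem mounted at /opt/test1. Invoking the role directly with the same variables would look roughly like the sketch here (values copied from this log; the test itself wraps the call in run_role_with_clear_facts.yml rather than using a standalone play):

    - hosts: managed-node12
      roles:
        - fedora.linux_system_roles.storage
      vars:
        storage_pools:
          - name: foo
            type: stratis
            disks: [sda, sdb, sdc, sdd, sde, sdf, sdg, sdh, sdi]
            encryption: true
            encryption_password: yabbadabbadoo
            volumes:
              - name: test1
                size: "4g"
                mount_point: /opt/test1

With only a password supplied, the encryption is left to Stratis itself rather than a separately managed LUKS layer, which is consistent with /etc/crypttab remaining empty later in this run.

TASK [fedora.linux_system_roles.storage : Add repo key]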
************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Saturday 28 March 2026 19:29:00 -0400 (0:00:00.052) 0:06:35.800 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_blivet_custom_repo.key is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Add blivet repo] ********************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:15 Saturday 28 March 2026 19:29:00 -0400 (0:00:00.022) 0:06:35.823 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_blivet_custom_repo.key is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:20 Saturday 28 March 2026 19:29:00 -0400 (0:00:00.021) 0:06:35.844 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:27 Saturday 28 March 2026 19:29:00 -0400 (0:00:00.020) 0:06:35.865 ******** ok: [managed-node12] => { "storage_pools | d([])": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": true, "encryption_password": "yabbadabbadoo", "name": "foo", "type": "stratis", "volumes": [ { "mount_point": "/opt/test1", "name": "test1", "size": "4g" } ] } ] } TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:32 Saturday 28 March 2026 19:29:00 -0400 (0:00:00.025) 0:06:35.890 ******** ok: [managed-node12] => { "storage_volumes | d([])": [] } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37 Saturday 28 March 2026 19:29:00 -0400 (0:00:00.023) 0:06:35.914 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:50 Saturday 28 March 2026 19:29:00 -0400 (0:00:00.019) 0:06:35.933 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:56 Saturday 28 March 2026 19:29:00 -0400 (0:00:00.018) 0:06:35.952 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" 
in storage_skip_checks", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:76 Saturday 28 March 2026 19:29:00 -0400 (0:00:00.020) 0:06:35.972 ******** ok: [managed-node12] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "apt-daily.service": { "name": "apt-daily.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, 
"dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fcoe.service": { "name": "fcoe.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": 
"systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "iscsi-shutdown.service": { "name": "iscsi-shutdown.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsi.service": { "name": "iscsi.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsid.service": { "name": "iscsid.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-activation-early.service": { "name": "lvm2-activation-early.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", 
"source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_multipath.service": { "name": "modprobe@dm_multipath.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", 
"source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "raid-check.service": { "name": "raid-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "rbdmap.service": { "name": "rbdmap.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-unix-local@.service": { 
"name": "sshd-unix-local@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd-vsock@.service": { "name": "sshd-vsock@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "stratis-fstab-setup-with-network@.service": { "name": "stratis-fstab-setup-with-network@.service", "source": "systemd", "state": "unknown", "status": "static" }, "stratis-fstab-setup@.service": { "name": "stratis-fstab-setup@.service", "source": "systemd", "state": "unknown", "status": "static" }, "stratisd-min-postinitrd.service": { "name": "stratisd-min-postinitrd.service", "source": "systemd", "state": "inactive", "status": "static" }, "stratisd.service": { "name": "stratisd.service", "source": "systemd", "state": "running", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", 
"status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": 
"systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": 
"systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": 
"systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-userdbd.service": { "name": "systemd-userdbd.service", "source": "systemd", "state": "running", "status": "indirect" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:82 Saturday 28 March 2026 19:29:02 -0400 (0:00:02.032) 0:06:38.005 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:88 Saturday 28 March 2026 19:29:02 -0400 (0:00:00.030) 0:06:38.036 ******** changed: [managed-node12] => { "actions": [ { "action": "create format", "device": "/dev/sdi", "fs_type": "stratis" }, { "action": "create format", "device": "/dev/sdh", "fs_type": "stratis" }, { "action": "create format", "device": "/dev/sdg", "fs_type": "stratis" }, { "action": "create format", "device": "/dev/sdf", "fs_type": "stratis" }, { "action": "create format", "device": "/dev/sde", "fs_type": "stratis" }, { "action": "create format", "device": "/dev/sdd", "fs_type": "stratis" }, { "action": "create format", "device": "/dev/sdc", "fs_type": "stratis" }, { "action": "create format", "device": "/dev/sdb", "fs_type": "stratis" }, { "action": "create format", "device": "/dev/sda", "fs_type": "stratis" }, { "action": "create device", "device": "/dev/stratis/foo", "fs_type": null }, { "action": "create device", "device": "/dev/stratis/foo/test1", "fs_type": null }, { "action": "create format", "device": "/dev/stratis/foo/test1", "fs_type": "stratis xfs" } ], "changed": true, "crypts": [], "leaves": [ "/dev/xvda1", "/dev/xvda2", "/dev/stratis/foo/test1" ], "mounts": [ { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "UUID=73aed7df-5753-4b99-a608-d0d02019ecf4", "state": "mounted" } ], "packages": [ "stratisd", "xfsprogs", "stratis-cli" ], "pools": [ { "disks": [ "sda", "sdb", "sdc", "sdd", 
"sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": true, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "stratis", "volumes": [ { "_device": "/dev/stratis/foo/test1", "_kernel_device": "/dev/dm-7", "_mount_id": "UUID=73aed7df-5753-4b99-a608-d0d02019ecf4", "_raw_device": "/dev/stratis/foo/test1", "_raw_kernel_device": "/dev/dm-7", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:103 Saturday 28 March 2026 19:29:31 -0400 (0:00:28.328) 0:07:06.364 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_udevadm_trigger | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ****** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:110 Saturday 28 March 2026 19:29:31 -0400 (0:00:00.022) 0:07:06.386 ******** ok: [managed-node12] => { "changed": false, "stat": { "atime": 1774740508.1639233, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "6aeccfbb3223a634b983c3c21792c1ba90809bb8", "ctime": 1774740502.1553178, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 230686985, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1774740502.1553178, "nlink": 1, "path": "/etc/fstab", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1408, "uid": 0, "version": "1679064280", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:115 Saturday 28 March 2026 19:29:31 -0400 (0:00:00.391) 
0:07:06.778 ******** ok: [managed-node12] => { "backup": "", "changed": false } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:133 Saturday 28 March 2026 19:29:32 -0400 (0:00:00.404) 0:07:07.183 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:139 Saturday 28 March 2026 19:29:32 -0400 (0:00:00.031) 0:07:07.215 ******** ok: [managed-node12] => { "blivet_output": { "actions": [ { "action": "create format", "device": "/dev/sdi", "fs_type": "stratis" }, { "action": "create format", "device": "/dev/sdh", "fs_type": "stratis" }, { "action": "create format", "device": "/dev/sdg", "fs_type": "stratis" }, { "action": "create format", "device": "/dev/sdf", "fs_type": "stratis" }, { "action": "create format", "device": "/dev/sde", "fs_type": "stratis" }, { "action": "create format", "device": "/dev/sdd", "fs_type": "stratis" }, { "action": "create format", "device": "/dev/sdc", "fs_type": "stratis" }, { "action": "create format", "device": "/dev/sdb", "fs_type": "stratis" }, { "action": "create format", "device": "/dev/sda", "fs_type": "stratis" }, { "action": "create device", "device": "/dev/stratis/foo", "fs_type": null }, { "action": "create device", "device": "/dev/stratis/foo/test1", "fs_type": null }, { "action": "create format", "device": "/dev/stratis/foo/test1", "fs_type": "stratis xfs" } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/xvda1", "/dev/xvda2", "/dev/stratis/foo/test1" ], "mounts": [ { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "UUID=73aed7df-5753-4b99-a608-d0d02019ecf4", "state": "mounted" } ], "packages": [ "stratisd", "xfsprogs", "stratis-cli" ], "pools": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": true, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "stratis", "volumes": [ { "_device": "/dev/stratis/foo/test1", "_kernel_device": "/dev/dm-7", "_mount_id": "UUID=73aed7df-5753-4b99-a608-d0d02019ecf4", "_raw_device": "/dev/stratis/foo/test1", "_raw_kernel_device": "/dev/dm-7", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": 
null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148 Saturday 28 March 2026 19:29:32 -0400 (0:00:00.031) 0:07:07.247 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": true, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "stratis", "volumes": [ { "_device": "/dev/stratis/foo/test1", "_kernel_device": "/dev/dm-7", "_mount_id": "UUID=73aed7df-5753-4b99-a608-d0d02019ecf4", "_raw_device": "/dev/stratis/foo/test1", "_raw_kernel_device": "/dev/dm-7", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:152 Saturday 28 March 2026 19:29:32 -0400 (0:00:00.047) 0:07:07.295 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] ************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:168 Saturday 28 March 2026 19:29:32 -0400 (0:00:00.065) 0:07:07.360 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:179 Saturday 28 March 2026 19:29:32 -0400 (0:00:00.068) 0:07:07.429 ******** ok: [managed-node12] => { "changed": false, "name": null, "status": {} } TASK 
[fedora.linux_system_roles.storage : Set up new/current mounts] *********** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:184 Saturday 28 March 2026 19:29:33 -0400 (0:00:00.840) 0:07:08.269 ******** redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount changed: [managed-node12] => (item={'src': 'UUID=73aed7df-5753-4b99-a608-d0d02019ecf4', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => { "ansible_loop_var": "mount_info", "backup_file": "", "boot": "yes", "changed": true, "dump": "0", "fstab": "/etc/fstab", "fstype": "xfs", "mount_info": { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "UUID=73aed7df-5753-4b99-a608-d0d02019ecf4", "state": "mounted" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "UUID=73aed7df-5753-4b99-a608-d0d02019ecf4" } TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195 Saturday 28 March 2026 19:29:33 -0400 (0:00:00.476) 0:07:08.745 ******** skipping: [managed-node12] => (item={'src': 'UUID=73aed7df-5753-4b99-a608-d0d02019ecf4', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => { "ansible_loop_var": "mount_info", "changed": false, "false_condition": "mount_info['owner'] != none or mount_info['group'] != none or mount_info['mode'] != none", "mount_info": { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "UUID=73aed7df-5753-4b99-a608-d0d02019ecf4", "state": "mounted" }, "skip_reason": "Conditional result was False" } skipping: [managed-node12] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:207 Saturday 28 March 2026 19:29:33 -0400 (0:00:00.096) 0:07:08.842 ******** ok: [managed-node12] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:215 Saturday 28 March 2026 19:29:34 -0400 (0:00:00.860) 0:07:09.702 ******** ok: [managed-node12] => { "changed": false, "stat": { "atime": 1774737762.6954868, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1774342704.475, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 4194435, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1774342323.629, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "1224473831", "wgrp": false, "woth": false, 
"writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:220 Saturday 28 March 2026 19:29:35 -0400 (0:00:00.518) 0:07:10.221 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:242 Saturday 28 March 2026 19:29:35 -0400 (0:00:00.039) 0:07:10.260 ******** ok: [managed-node12] TASK [Verify role results - 5] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:151 Saturday 28 March 2026 19:29:36 -0400 (0:00:01.154) 0:07:11.415 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node12 TASK [Print out pool information] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2 Saturday 28 March 2026 19:29:36 -0400 (0:00:00.081) 0:07:11.496 ******** ok: [managed-node12] => { "_storage_pools_list": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": true, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "stratis", "volumes": [ { "_device": "/dev/stratis/foo/test1", "_kernel_device": "/dev/dm-7", "_mount_id": "UUID=73aed7df-5753-4b99-a608-d0d02019ecf4", "_raw_device": "/dev/stratis/foo/test1", "_raw_kernel_device": "/dev/dm-7", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } ] } ] } TASK [Print out volume information] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7 Saturday 28 March 2026 19:29:36 -0400 (0:00:00.048) 0:07:11.545 ******** skipping: [managed-node12] => { "false_condition": 
"_storage_volumes_list | length > 0" } TASK [Collect info about the volumes.] ***************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15 Saturday 28 March 2026 19:29:36 -0400 (0:00:00.055) 0:07:11.601 ******** ok: [managed-node12] => { "changed": false, "info": { "/dev/loop0": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/loop0", "size": "0B", "type": "loop", "uuid": "" }, "/dev/mapper/stratis-1-private-2818607ade064e9c8af91e98a66ed410-crypt": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/mapper/stratis-1-private-2818607ade064e9c8af91e98a66ed410-crypt", "size": "52G", "type": "crypt", "uuid": "" }, "/dev/mapper/stratis-1-private-2818607ade064e9c8af91e98a66ed410-flex-mdv": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/mapper/stratis-1-private-2818607ade064e9c8af91e98a66ed410-flex-mdv", "size": "512M", "type": "stratis", "uuid": "" }, "/dev/mapper/stratis-1-private-2818607ade064e9c8af91e98a66ed410-flex-thindata": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/mapper/stratis-1-private-2818607ade064e9c8af91e98a66ed410-flex-thindata", "size": "50G", "type": "stratis", "uuid": "" }, "/dev/mapper/stratis-1-private-2818607ade064e9c8af91e98a66ed410-flex-thinmeta": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/mapper/stratis-1-private-2818607ade064e9c8af91e98a66ed410-flex-thinmeta", "size": "786M", "type": "stratis", "uuid": "" }, "/dev/mapper/stratis-1-private-2818607ade064e9c8af91e98a66ed410-physical-cache": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/mapper/stratis-1-private-2818607ade064e9c8af91e98a66ed410-physical-cache", "size": "52.1G", "type": "stratis", "uuid": "" }, "/dev/mapper/stratis-1-private-2818607ade064e9c8af91e98a66ed410-physical-originsub": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/mapper/stratis-1-private-2818607ade064e9c8af91e98a66ed410-physical-originsub", "size": "52.1G", "type": "stratis", "uuid": "" }, "/dev/mapper/stratis-1-private-2818607ade064e9c8af91e98a66ed410-thinpool-pool": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/mapper/stratis-1-private-2818607ade064e9c8af91e98a66ed410-thinpool-pool", "size": "50G", "type": "stratis", "uuid": "" }, "/dev/sda": { "fstype": "stratis", "label": "", "mountpoint": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "7d6c178e-9ecc-4871-8591-eb6160c0dfc4" }, "/dev/sdb": { "fstype": "stratis", "label": "", "mountpoint": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "e60c10f9-ecec-4400-afa0-421adab84c38" }, "/dev/sdc": { "fstype": "stratis", "label": "", "mountpoint": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "ce43694f-be78-443b-ab4a-ee11137ae8a7" }, "/dev/sdd": { "fstype": "stratis", "label": "", "mountpoint": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "e36f5949-1179-4998-af32-2efdfe008c8d" }, "/dev/sde": { "fstype": "stratis", "label": "", "mountpoint": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "9bab2aaf-afcf-446e-bee3-441d26ffec5b" }, "/dev/sdf": { "fstype": "stratis", "label": "", "mountpoint": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "c3bb8276-600e-40a4-8da8-571ca56edb00" }, "/dev/sdg": { "fstype": "stratis", "label": "", "mountpoint": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "e57a51f2-445c-4ea2-8313-77ae08fa3917" }, "/dev/sdh": { "fstype": "stratis", "label": "", "mountpoint": 
"", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "60dc9f49-23bc-4127-929c-f17a3b3f1d74" }, "/dev/sdi": { "fstype": "stratis", "label": "", "mountpoint": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "390d2998-8451-4939-9793-c6ee49a53091" }, "/dev/stratis/foo/test1": { "fstype": "xfs", "label": "", "mountpoint": "/opt/test1", "name": "/dev/stratis/foo/test1", "size": "4G", "type": "stratis", "uuid": "73aed7df-5753-4b99-a608-d0d02019ecf4" }, "/dev/xvda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda1", "size": "1M", "type": "partition", "uuid": "" }, "/dev/xvda2": { "fstype": "xfs", "label": "", "mountpoint": "/", "name": "/dev/xvda2", "size": "250G", "type": "partition", "uuid": "94fc577e-f1df-44bb-8e86-63b9a68f8f7f" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20 Saturday 28 March 2026 19:29:37 -0400 (0:00:00.515) 0:07:12.116 ******** ok: [managed-node12] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.003024", "end": "2026-03-28 19:29:37.456882", "rc": 0, "start": "2026-03-28 19:29:37.453858" } STDOUT: # system_role:storage # # /etc/fstab # Created by anaconda on Tue Mar 24 08:52:03 2026 # # Accessible filesystems, by reference, are maintained under '/dev/disk/'. # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info. # # After editing this file, run 'systemctl daemon-reload' to update systemd # units generated from this file. # UUID=94fc577e-f1df-44bb-8e86-63b9a68f8f7f / xfs defaults 0 0 ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 UUID=73aed7df-5753-4b99-a608-d0d02019ecf4 /opt/test1 xfs defaults 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25 Saturday 28 March 2026 19:29:37 -0400 (0:00:00.526) 0:07:12.643 ******** ok: [managed-node12] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.002936", "end": "2026-03-28 19:29:37.973107", "failed_when_result": false, "rc": 0, "start": "2026-03-28 19:29:37.970171" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34 Saturday 28 March 2026 19:29:38 
-0400 (0:00:00.510) 0:07:13.153 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node12 => (item={'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'encryption': True, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER', 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'present', 'type': 'stratis', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'part_type': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=73aed7df-5753-4b99-a608-d0d02019ecf4', '_kernel_device': '/dev/dm-7', '_raw_kernel_device': '/dev/dm-7'}]}) TASK [Set _storage_pool_tests] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5 Saturday 28 March 2026 19:29:38 -0400 (0:00:00.085) 0:07:13.239 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [Get VG shared value status] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18 Saturday 28 March 2026 19:29:38 -0400 (0:00:00.040) 0:07:13.279 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'", "skip_reason": "Conditional result was False" } TASK [Verify that VG shared value checks out] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24 Saturday 28 March 2026 19:29:38 -0400 (0:00:00.031) 0:07:13.311 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'", "skip_reason": "Conditional result was False" } TASK [Verify pool subset] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34 Saturday 28 March 2026 19:29:38 -0400 (0:00:00.032) 0:07:13.344 ******** included: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node12 => (item=members) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node12 => (item=volumes) TASK [Set test variables] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2 Saturday 28 March 2026 19:29:38 -0400 (0:00:00.106) 0:07:13.451 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Get the canonical device path for each member device] ******************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8 Saturday 28 March 2026 19:29:38 -0400 (0:00:00.073) 0:07:13.524 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Set pvs lvm length] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17 Saturday 28 March 2026 19:29:38 -0400 (0:00:00.050) 0:07:13.575 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Set pool pvs] ************************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22 Saturday 28 March 2026 19:29:38 -0400 (0:00:00.037) 0:07:13.612 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Verify PV count] ********************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27 Saturday 28 March 2026 19:29:38 -0400 (0:00:00.030) 0:07:13.643 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Set expected pv type] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36 Saturday 28 March 2026 19:29:38 -0400 (0:00:00.037) 0:07:13.681 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Set expected pv type - 2] ************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41 Saturday 28 March 2026 19:29:38 -0400 (0:00:00.047) 0:07:13.729 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm' and not storage_test_pool.encryption", "skip_reason": "Conditional result was False" } TASK [Set expected pv type - 3] ************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46 Saturday 28 March 2026 19:29:38 -0400 (0:00:00.036) 
0:07:13.765 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:55 Saturday 28 March 2026 19:29:38 -0400 (0:00:00.021) 0:07:13.787 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Check that blivet supports PV grow to fill] ****************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:68 Saturday 28 March 2026 19:29:38 -0400 (0:00:00.023) 0:07:13.810 ******** ok: [managed-node12] => { "changed": false, "failed_when_result": false, "rc": 0 } STDERR: OpenSSH_9.9p1, OpenSSL 3.5.5 27 Jan 2026 debug1: Reading configuration data /root/.ssh/config debug1: Reading configuration data /etc/ssh/ssh_config debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug2: checking match for 'final all' host 10.31.11.27 originally 10.31.11.27 debug2: match not found debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config debug1: configuration requests final Match pass debug2: resolve_canonicalize: hostname 10.31.11.27 is address debug1: re-parsing configuration debug1: Reading configuration data /root/.ssh/config debug1: Reading configuration data /etc/ssh/ssh_config debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug2: checking match for 'final all' host 10.31.11.27 originally 10.31.11.27 debug2: match found debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config debug1: auto-mux: Trying existing master at '/root/.ansible/cp/e1141b92e1' debug2: fd 3 setting O_NONBLOCK debug2: mux_client_hello_exchange: master version 4 debug1: mux_client_request_session: master session id: 2 debug2: Received exit status from master 0 Shared connection to 10.31.11.27 closed. 
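
All of the member checks in the block above (Set pvs lvm length, Set pool pvs, Verify PV count, and the expected-pv-type tasks) are guarded on an LVM pool, so for this Stratis pool each one logs false_condition "storage_test_pool.type == 'lvm'" and is skipped. A hypothetical reduction of that pattern is sketched below; only the when-guard and the variable names _storage_test_pool_pvs and _storage_test_expected_pv_count (both appear later in this log) come from the output, and the assertion body is a guess at what the real task in test-verify-pool-members.yml checks, not a copy of it.

    # Sketch only, not the actual test task. The guard is what produces the
    # "Conditional result was False" skips above when the pool type is
    # 'stratis' rather than 'lvm'.
    - name: Verify PV count
      ansible.builtin.assert:
        that:
          - _storage_test_pool_pvs | length == _storage_test_expected_pv_count | int
      when: storage_test_pool.type == 'lvm'
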
TASK [Verify that PVs fill the whole devices when they should] ***************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:78 Saturday 28 March 2026 19:29:39 -0400 (0:00:00.444) 0:07:14.255 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Check MD RAID] *********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:88 Saturday 28 March 2026 19:29:39 -0400 (0:00:00.026) 0:07:14.282 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node12 TASK [Get information about RAID] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8 Saturday 28 March 2026 19:29:39 -0400 (0:00:00.066) 0:07:14.348 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14 Saturday 28 March 2026 19:29:39 -0400 (0:00:00.032) 0:07:14.381 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19 Saturday 28 March 2026 19:29:39 -0400 (0:00:00.030) 0:07:14.412 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24 Saturday 28 March 2026 19:29:39 -0400 (0:00:00.030) 0:07:14.442 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set md chunk size regex] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29 Saturday 28 March 2026 19:29:39 -0400 (0:00:00.031) 0:07:14.473 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37 Saturday 28 March 2026 19:29:39 -0400 (0:00:00.030) 0:07:14.504 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46 Saturday 28 March 2026 19:29:39 
-0400 (0:00:00.029) 0:07:14.534 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55 Saturday 28 March 2026 19:29:39 -0400 (0:00:00.028) 0:07:14.563 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64 Saturday 28 March 2026 19:29:39 -0400 (0:00:00.107) 0:07:14.670 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74 Saturday 28 March 2026 19:29:39 -0400 (0:00:00.040) 0:07:14.711 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83 Saturday 28 March 2026 19:29:39 -0400 (0:00:00.033) 0:07:14.745 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_chunk_size_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:91 Saturday 28 March 2026 19:29:39 -0400 (0:00:00.038) 0:07:14.784 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node12 TASK [Validate pool member LVM RAID settings] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2 Saturday 28 March 2026 19:29:39 -0400 (0:00:00.071) 0:07:14.856 ******** skipping: [managed-node12] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'part_type': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 
'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=73aed7df-5753-4b99-a608-d0d02019ecf4', '_kernel_device': '/dev/dm-7', '_raw_kernel_device': '/dev/dm-7'}) => { "ansible_loop_var": "storage_test_lvmraid_volume", "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False", "storage_test_lvmraid_volume": { "_device": "/dev/stratis/foo/test1", "_kernel_device": "/dev/dm-7", "_mount_id": "UUID=73aed7df-5753-4b99-a608-d0d02019ecf4", "_raw_device": "/dev/stratis/foo/test1", "_raw_kernel_device": "/dev/dm-7", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } } skipping: [managed-node12] => { "changed": false } MSG: All items skipped TASK [Check Thin Pools] ******************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:94 Saturday 28 March 2026 19:29:39 -0400 (0:00:00.069) 0:07:14.925 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node12 TASK [Validate pool member thinpool settings] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2 Saturday 28 March 2026 19:29:39 -0400 (0:00:00.166) 0:07:15.091 ******** skipping: [managed-node12] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'part_type': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=73aed7df-5753-4b99-a608-d0d02019ecf4', '_kernel_device': '/dev/dm-7', '_raw_kernel_device': '/dev/dm-7'}) 
=> { "ansible_loop_var": "storage_test_thin_volume", "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False", "storage_test_thin_volume": { "_device": "/dev/stratis/foo/test1", "_kernel_device": "/dev/dm-7", "_mount_id": "UUID=73aed7df-5753-4b99-a608-d0d02019ecf4", "_raw_device": "/dev/stratis/foo/test1", "_raw_kernel_device": "/dev/dm-7", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } } skipping: [managed-node12] => { "changed": false } MSG: All items skipped TASK [Check member encryption] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:97 Saturday 28 March 2026 19:29:40 -0400 (0:00:00.091) 0:07:15.183 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node12 TASK [Set test variables] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5 Saturday 28 March 2026 19:29:40 -0400 (0:00:00.141) 0:07:15.324 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "1", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10 Saturday 28 March 2026 19:29:40 -0400 (0:00:00.117) 0:07:15.441 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Validate pool member crypttab entries] *********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17 Saturday 28 March 2026 19:29:40 -0400 (0:00:00.063) 0:07:15.505 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24 Saturday 28 March 2026 19:29:40 -0400 (0:00:00.047) 0:07:15.552 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:100 Saturday 28 March 2026 19:29:40 -0400 (0:00:00.100) 0:07:15.653 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node12 TASK [Validate pool member VDO settings] *************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2 Saturday 28 March 2026 19:29:40 -0400 (0:00:00.208) 0:07:15.861 ******** skipping: [managed-node12] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'part_type': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=73aed7df-5753-4b99-a608-d0d02019ecf4', '_kernel_device': '/dev/dm-7', '_raw_kernel_device': '/dev/dm-7'}) => { "ansible_loop_var": "storage_test_vdo_volume", "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False", "storage_test_vdo_volume": { "_device": "/dev/stratis/foo/test1", "_kernel_device": "/dev/dm-7", "_mount_id": "UUID=73aed7df-5753-4b99-a608-d0d02019ecf4", "_raw_device": "/dev/stratis/foo/test1", "_raw_kernel_device": "/dev/dm-7", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } } skipping: [managed-node12] => { "changed": false } MSG: All items skipped TASK [Check Stratis] *********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:103 Saturday 28 March 2026 19:29:40 -0400 (0:00:00.115) 0:07:15.977 ******** included: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node12 TASK [Get stratis pool information] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6 Saturday 28 March 2026 19:29:41 -0400 (0:00:00.180) 0:07:16.157 ******** ok: [managed-node12] => { "changed": false, "rc": 0 } STDOUT: {"name": "foo", "encrypted": true, "key_desc": "blivet-foo", "clevis_pin": null, "clevis_args": {}} STDERR: OpenSSH_9.9p1, OpenSSL 3.5.5 27 Jan 2026 debug1: Reading configuration data /root/.ssh/config debug1: Reading configuration data /etc/ssh/ssh_config debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug2: checking match for 'final all' host 10.31.11.27 originally 10.31.11.27 debug2: match not found debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config debug1: configuration requests final Match pass debug2: resolve_canonicalize: hostname 10.31.11.27 is address debug1: re-parsing configuration debug1: Reading configuration data /root/.ssh/config debug1: Reading configuration data /etc/ssh/ssh_config debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug2: checking match for 'final all' host 10.31.11.27 originally 10.31.11.27 debug2: match found debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config debug1: auto-mux: Trying existing master at '/root/.ansible/cp/e1141b92e1' debug2: fd 3 setting O_NONBLOCK debug2: mux_client_hello_exchange: master version 4 debug1: mux_client_request_session: master session id: 2 debug2: Received exit status from master 0 Shared connection to 10.31.11.27 closed. TASK [Print script output] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15 Saturday 28 March 2026 19:29:41 -0400 (0:00:00.579) 0:07:16.736 ******** ok: [managed-node12] => {} MSG: {'name': 'foo', 'encrypted': True, 'key_desc': 'blivet-foo', 'clevis_pin': None, 'clevis_args': {}} TASK [Get information about Stratis] ******************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:19 Saturday 28 March 2026 19:29:41 -0400 (0:00:00.035) 0:07:16.772 ******** ok: [managed-node12] => { "ansible_facts": { "_stratis_pool_info": { "clevis_args": {}, "clevis_pin": null, "encrypted": true, "key_desc": "blivet-foo", "name": "foo" } }, "changed": false } TASK [Verify that the pools was created] *************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:23 Saturday 28 March 2026 19:29:41 -0400 (0:00:00.031) 0:07:16.804 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Verify that encryption is correctly set] ********************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:30 Saturday 28 March 2026 19:29:41 -0400 (0:00:00.035) 0:07:16.840 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Verify that Clevis/Tang encryption is correctly set] ********************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:39 Saturday 28 March 
2026 19:29:41 -0400 (0:00:00.039) 0:07:16.879 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.encryption_clevis_pin == 'tang'", "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:49 Saturday 28 March 2026 19:29:41 -0400 (0:00:00.032) 0:07:16.912 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_stratis_report": null }, "changed": false } TASK [Clean up test variables] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:106 Saturday 28 March 2026 19:29:41 -0400 (0:00:00.038) 0:07:16.950 ******** ok: [managed-node12] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Verify the volumes] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3 Saturday 28 March 2026 19:29:41 -0400 (0:00:00.032) 0:07:16.982 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node12 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'part_type': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=73aed7df-5753-4b99-a608-d0d02019ecf4', '_kernel_device': '/dev/dm-7', '_raw_kernel_device': '/dev/dm-7'}) TASK [Set storage volume test variables] *************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2 Saturday 28 March 2026 19:29:41 -0400 (0:00:00.057) 0:07:17.039 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_volume_present": true, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [Run test verify for storage_test_volume_subset] ************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19 Saturday 28 March 2026 19:29:41 -0400 (0:00:00.028) 0:07:17.068 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node12 => 
(item=mount) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node12 => (item=fstab) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node12 => (item=fs) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node12 => (item=device) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node12 => (item=encryption) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node12 => (item=md) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node12 => (item=size) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node12 => (item=cache) TASK [Get expected mount device based on device type] ************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7 Saturday 28 March 2026 19:29:42 -0400 (0:00:00.276) 0:07:17.344 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_device_path": "/dev/stratis/foo/test1" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11 Saturday 28 March 2026 19:29:42 -0400 (0:00:00.053) 0:07:17.397 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_mount_expected_mount_point": "/opt/test1", "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Get information about the mountpoint directory] ************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19 Saturday 28 March 2026 19:29:42 -0400 (0:00:00.073) 0:07:17.470 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "(not storage_test_volume.mount_user is none and storage_test_volume.mount_user | length > 0) or (not storage_test_volume.mount_group is none and storage_test_volume.mount_group | length > 0) or (not storage_test_volume.mount_mode is none and storage_test_volume.mount_mode | length > 0)", "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by device] ******************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:32 Saturday 28 March 2026 19:29:42 -0400 (0:00:00.081) 0:07:17.552 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Verify mount directory user] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:40 Saturday 28 March 2026 19:29:42 -0400 (0:00:00.079) 0:07:17.632 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "not storage_test_volume.mount_user is none", "skip_reason": "Conditional result was False" } TASK [Verify mount directory group] ******************************************** task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:51 Saturday 28 March 2026 19:29:42 -0400 (0:00:00.094) 0:07:17.727 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "not storage_test_volume.mount_group is none", "skip_reason": "Conditional result was False" } TASK [Verify mount directory permissions] ************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:62 Saturday 28 March 2026 19:29:42 -0400 (0:00:00.078) 0:07:17.805 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "not storage_test_volume.mount_mode is none", "skip_reason": "Conditional result was False" } TASK [Get path of test volume device] ****************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:76 Saturday 28 March 2026 19:29:42 -0400 (0:00:00.109) 0:07:17.914 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:82 Saturday 28 March 2026 19:29:42 -0400 (0:00:00.038) 0:07:17.953 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:88 Saturday 28 March 2026 19:29:42 -0400 (0:00:00.054) 0:07:18.007 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Unset facts] ************************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:98 Saturday 28 March 2026 19:29:42 -0400 (0:00:00.062) 0:07:18.070 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_found_mount_stat": null, "storage_test_mount_expected_mount_point": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2 Saturday 28 March 2026 19:29:43 -0400 (0:00:00.071) 0:07:18.141 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "1", "storage_test_fstab_expected_mount_options_matches": "1", "storage_test_fstab_expected_mount_point_matches": "1", "storage_test_fstab_id_matches": [ "UUID=73aed7df-5753-4b99-a608-d0d02019ecf4 " ], "storage_test_fstab_mount_options_matches": [ " /opt/test1 xfs defaults " ], "storage_test_fstab_mount_point_matches": [ " /opt/test1 " ] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17 Saturday 28 March 2026 19:29:43 -0400 (0:00:00.139) 0:07:18.281 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Verify the fstab mount point] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24 Saturday 28 March 2026 19:29:43 -0400 (0:00:00.073) 0:07:18.355 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33 Saturday 28 March 2026 19:29:43 -0400 (0:00:00.087) 0:07:18.442 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "__storage_verify_mount_options | d(false)", "skip_reason": "Conditional result was False" } TASK [Verify fingerprint] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45 Saturday 28 March 2026 19:29:43 -0400 (0:00:00.070) 0:07:18.513 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Clean up variables] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52 Saturday 28 March 2026 19:29:43 -0400 (0:00:00.068) 0:07:18.582 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6 Saturday 28 March 2026 19:29:43 -0400 (0:00:00.099) 0:07:18.681 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type != \"stratis\"", "skip_reason": "Conditional result was False" } TASK [Verify fs label] ********************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14 Saturday 28 March 2026 19:29:43 -0400 (0:00:00.052) 0:07:18.733 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type != \"stratis\"", "skip_reason": "Conditional result was False" } TASK [See whether the device node is present] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3 Saturday 28 March 2026 19:29:43 -0400 (0:00:00.079) 0:07:18.813 ******** ok: [managed-node12] => { "changed": false, "stat": { "atime": 1774740571.1487584, "attr_flags": "", "attributes": [], "block_size": 512, "blocks": 0, "charset": "binary", "ctime": 1774740571.1487584, "dev": 6, "device_type": 64775, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 1632, "isblk": true, "ischr": false, "isdir": false, 
"isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/symlink", "mode": "0660", "mtime": 1774740571.1487584, "nlink": 1, "path": "/dev/stratis/foo/test1", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9 Saturday 28 March 2026 19:29:44 -0400 (0:00:00.531) 0:07:19.344 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Verify the presence/absence of the device node - 2] ********************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16 Saturday 28 March 2026 19:29:44 -0400 (0:00:00.072) 0:07:19.417 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "not (_storage_test_volume_present or storage_test_volume.type == 'disk')", "skip_reason": "Conditional result was False" } TASK [Make sure we got info about this volume] ********************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23 Saturday 28 March 2026 19:29:44 -0400 (0:00:00.042) 0:07:19.460 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Process volume type (set initial value) (1/2)] *************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29 Saturday 28 March 2026 19:29:44 -0400 (0:00:00.045) 0:07:19.505 ******** ok: [managed-node12] => { "ansible_facts": { "st_volume_type": "stratis" }, "changed": false } TASK [Process volume type (get RAID value) (2/2)] ****************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33 Saturday 28 March 2026 19:29:44 -0400 (0:00:00.048) 0:07:19.554 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == \"raid\"", "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38 Saturday 28 March 2026 19:29:44 -0400 (0:00:00.043) 0:07:19.598 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Stat the LUKS device, if encrypted] ************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3 Saturday 28 March 2026 19:29:44 -0400 (0:00:00.065) 0:07:19.663 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10 Saturday 28 March 2026 19:29:44 -0400 (0:00:00.031) 0:07:19.694 ******** ok: [managed-node12] => { "changed": false, "rc": 0, 
"results": [] } MSG: Nothing to do lsrpackages: cryptsetup TASK [Collect LUKS info for this volume] *************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16 Saturday 28 March 2026 19:29:45 -0400 (0:00:00.881) 0:07:20.576 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.encryption and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22 Saturday 28 March 2026 19:29:45 -0400 (0:00:00.029) 0:07:20.605 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29 Saturday 28 March 2026 19:29:45 -0400 (0:00:00.020) 0:07:20.626 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40 Saturday 28 March 2026 19:29:45 -0400 (0:00:00.030) 0:07:20.656 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46 Saturday 28 March 2026 19:29:45 -0400 (0:00:00.020) 0:07:20.677 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51 Saturday 28 March 2026 19:29:45 -0400 (0:00:00.019) 0:07:20.696 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:64 Saturday 28 March 2026 19:29:45 -0400 (0:00:00.018) 0:07:20.715 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:77 Saturday 28 March 2026 19:29:45 -0400 (0:00:00.019) 0:07:20.734 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } 
TASK [Set test variables] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:90 Saturday 28 March 2026 19:29:45 -0400 (0:00:00.028) 0:07:20.763 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:96 Saturday 28 March 2026 19:29:45 -0400 (0:00:00.029) 0:07:20.792 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:103 Saturday 28 March 2026 19:29:45 -0400 (0:00:00.025) 0:07:20.817 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:111 Saturday 28 March 2026 19:29:45 -0400 (0:00:00.020) 0:07:20.838 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:119 Saturday 28 March 2026 19:29:45 -0400 (0:00:00.023) 0:07:20.862 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:127 Saturday 28 March 2026 19:29:45 -0400 (0:00:00.020) 0:07:20.882 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [Get information about RAID] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8 Saturday 28 March 2026 19:29:45 -0400 (0:00:00.021) 0:07:20.904 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14 Saturday 28 March 2026 19:29:45 -0400 (0:00:00.018) 0:07:20.923 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } 
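(Editorial note: the crypttab block above follows the same shape: the matching /etc/crypttab lines are collected into "_storage_test_crypttab_entries", their count is asserted against "_storage_test_expected_crypttab_entries" ("0" here, since the Stratis filesystem itself is not LUKS-encrypted), and the format/backing-device/key-file checks only run when exactly one entry is expected. A rough sketch under those assumptions, with the variable handling simplified:)

# Rough sketch, assuming the collected crypttab lines live in
# _storage_test_crypttab_entries; expected count is "0" for this volume.
- name: Check for /etc/crypttab entry
  ansible.builtin.assert:
    that:
      - _storage_test_crypttab_entries | length == _storage_test_expected_crypttab_entries | int

- name: Validate the format of the crypttab entry
  ansible.builtin.assert:
    that:
      - _storage_test_crypttab_entries[0].split() | length >= 3
  when: _storage_test_expected_crypttab_entries | int == 1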
TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19 Saturday 28 March 2026 19:29:45 -0400 (0:00:00.019) 0:07:20.942 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24 Saturday 28 March 2026 19:29:45 -0400 (0:00:00.018) 0:07:20.961 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set chunk size regex] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29 Saturday 28 March 2026 19:29:45 -0400 (0:00:00.018) 0:07:20.979 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37 Saturday 28 March 2026 19:29:45 -0400 (0:00:00.019) 0:07:20.998 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46 Saturday 28 March 2026 19:29:45 -0400 (0:00:00.019) 0:07:21.017 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54 Saturday 28 March 2026 19:29:45 -0400 (0:00:00.018) 0:07:21.036 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62 Saturday 28 March 2026 19:29:45 -0400 (0:00:00.019) 0:07:21.056 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70 Saturday 28 March 2026 19:29:45 -0400 (0:00:00.026) 0:07:21.082 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Parse the actual size of the volume] ************************************* task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3 Saturday 28 March 2026 19:29:46 -0400 (0:00:00.020) 0:07:21.103 ******** ok: [managed-node12] => { "bytes": 4294967296, "changed": false, "lvm": "4g", "parted": "4GiB", "size": "4 GiB" } TASK [Parse the requested size of the volume] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11 Saturday 28 March 2026 19:29:46 -0400 (0:00:00.475) 0:07:21.578 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == \"lvm\"", "skip_reason": "Conditional result was False" } TASK [Establish base value for expected size] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20 Saturday 28 March 2026 19:29:46 -0400 (0:00:00.060) 0:07:21.638 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == \"lvm\"", "skip_reason": "Conditional result was False" } TASK [Show expected size] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28 Saturday 28 March 2026 19:29:46 -0400 (0:00:00.063) 0:07:21.702 ******** ok: [managed-node12] => { "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined" } TASK [Get the size of parent/pool device] ************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32 Saturday 28 March 2026 19:29:46 -0400 (0:00:00.076) 0:07:21.779 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == \"lvm\"", "skip_reason": "Conditional result was False" } TASK [Show test pool] ********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46 Saturday 28 March 2026 19:29:46 -0400 (0:00:00.061) 0:07:21.840 ******** skipping: [managed-node12] => { "false_condition": "storage_test_volume.type == \"lvm\"" } TASK [Show test blockinfo] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50 Saturday 28 March 2026 19:29:46 -0400 (0:00:00.066) 0:07:21.906 ******** skipping: [managed-node12] => { "false_condition": "storage_test_volume.type == \"lvm\"" } TASK [Show test pool size] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54 Saturday 28 March 2026 19:29:46 -0400 (0:00:00.056) 0:07:21.963 ******** skipping: [managed-node12] => { "false_condition": "storage_test_volume.type == \"lvm\"" } TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58 Saturday 28 March 2026 19:29:46 -0400 (0:00:00.066) 0:07:22.030 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == \"lvm\"", "skip_reason": "Conditional result was False" } TASK [Default 
thin pool reserved space values] ********************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:68 Saturday 28 March 2026 19:29:47 -0400 (0:00:00.078) 0:07:22.108 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Default minimal thin pool reserved space size] *************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:72 Saturday 28 March 2026 19:29:47 -0400 (0:00:00.068) 0:07:22.176 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Default maximal thin pool reserved space size] *************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:77 Saturday 28 March 2026 19:29:47 -0400 (0:00:00.055) 0:07:22.232 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Calculate maximum usable space in thin pool] ***************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:83 Saturday 28 March 2026 19:29:47 -0400 (0:00:00.047) 0:07:22.279 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Apply upper size limit to max usable thin pool space] ******************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:87 Saturday 28 March 2026 19:29:47 -0400 (0:00:00.066) 0:07:22.346 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Apply lower size limit to max usable thin pool space] ******************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:92 Saturday 28 March 2026 19:29:47 -0400 (0:00:00.066) 0:07:22.412 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Convert maximum usable thin pool space from int to Size] ***************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:97 Saturday 28 March 2026 19:29:47 -0400 (0:00:00.064) 0:07:22.477 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Show max thin pool size] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:102 Saturday 28 March 2026 19:29:47 -0400 (0:00:00.041) 0:07:22.519 ******** skipping: [managed-node12] => { "false_condition": "storage_test_volume.thin | bool" } TASK [Show volume thin pool size] ********************************************** task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:106 Saturday 28 March 2026 19:29:47 -0400 (0:00:00.044) 0:07:22.564 ******** skipping: [managed-node12] => { "false_condition": "storage_test_volume.thin | bool" } TASK [Show test volume size] *************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:110 Saturday 28 March 2026 19:29:47 -0400 (0:00:00.050) 0:07:22.614 ******** skipping: [managed-node12] => { "false_condition": "storage_test_volume.thin | bool" } TASK [Establish base value for expected thin pool size] ************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:114 Saturday 28 March 2026 19:29:47 -0400 (0:00:00.026) 0:07:22.641 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Calculate the expected size based on pool size and percentage value - 2] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:121 Saturday 28 March 2026 19:29:47 -0400 (0:00:00.027) 0:07:22.668 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Establish base value for expected thin pool volume size] ***************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:128 Saturday 28 March 2026 19:29:47 -0400 (0:00:00.026) 0:07:22.695 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Calculate the expected thin pool volume size based on percentage value] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:132 Saturday 28 March 2026 19:29:47 -0400 (0:00:00.027) 0:07:22.722 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Replace expected volume size with calculated value] ********************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:138 Saturday 28 March 2026 19:29:47 -0400 (0:00:00.025) 0:07:22.748 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Show actual size] ******************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:144 Saturday 28 March 2026 19:29:47 -0400 (0:00:00.025) 0:07:22.773 ******** ok: [managed-node12] => { "storage_test_actual_size": { "bytes": 4294967296, "changed": false, "failed": false, "lvm": "4g", "parted": "4GiB", "size": "4 GiB" } } TASK [Show expected size - 2] ************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:148 Saturday 28 March 2026 19:29:47 -0400 (0:00:00.023) 0:07:22.797 ******** ok: [managed-node12] => { 
"storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined" } TASK [Assert expected size is actual size] ************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:152 Saturday 28 March 2026 19:29:47 -0400 (0:00:00.031) 0:07:22.829 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == \"lvm\"", "skip_reason": "Conditional result was False" } TASK [Get information about the LV] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5 Saturday 28 March 2026 19:29:47 -0400 (0:00:00.021) 0:07:22.851 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13 Saturday 28 March 2026 19:29:47 -0400 (0:00:00.019) 0:07:22.870 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Check segment type] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17 Saturday 28 March 2026 19:29:47 -0400 (0:00:00.019) 0:07:22.889 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Set LV cache size] ******************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24 Saturday 28 March 2026 19:29:47 -0400 (0:00:00.018) 0:07:22.908 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Parse the requested cache size] ****************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31 Saturday 28 March 2026 19:29:47 -0400 (0:00:00.018) 0:07:22.927 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Set expected cache size] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37 Saturday 28 March 2026 19:29:47 -0400 (0:00:00.019) 0:07:22.946 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42 Saturday 28 March 2026 
19:29:47 -0400 (0:00:00.023) 0:07:22.969 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25 Saturday 28 March 2026 19:29:47 -0400 (0:00:00.020) 0:07:22.990 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:43 Saturday 28 March 2026 19:29:47 -0400 (0:00:00.035) 0:07:23.026 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Clean up variable namespace] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:52 Saturday 28 March 2026 19:29:47 -0400 (0:00:00.028) 0:07:23.055 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Repeat the previous invocation to verify idempotence - 2] **************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:154 Saturday 28 March 2026 19:29:47 -0400 (0:00:00.038) 0:07:23.093 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml for managed-node12 TASK [Clear facts] ************************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml:10 Saturday 28 March 2026 19:29:48 -0400 (0:00:00.052) 0:07:23.145 ******** META: facts cleared TASK [Run the role] ************************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml:24 Saturday 28 March 2026 19:29:48 -0400 (0:00:00.011) 0:07:23.157 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "__sr_failed_when is defined", "skip_reason": "Conditional result was False" } TASK [Run the role normally] *************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml:34 Saturday 28 March 2026 19:29:48 -0400 (0:00:00.031) 0:07:23.188 ******** included: fedora.linux_system_roles.storage for managed-node12 TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Saturday 28 March 2026 19:29:48 -0400 (0:00:00.047) 0:07:23.236 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node12 TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Saturday 28 March 2026 19:29:48 -0400 (0:00:00.035) 
0:07:23.271 ******** ok: [managed-node12] TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Saturday 28 March 2026 19:29:49 -0400 (0:00:01.042) 0:07:24.313 ******** skipping: [managed-node12] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node12] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node12] => (item=CentOS_10.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-fs", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "xfsprogs", "stratisd", "stratis-cli", "{{ 'libblockdev-s390' if ansible_facts['architecture'] == 's390x' else 'libblockdev' }}", "vdo" ] }, "ansible_included_var_files": [ "/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node12] => (item=CentOS_10.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-fs", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "xfsprogs", "stratisd", "stratis-cli", "{{ 'libblockdev-s390' if ansible_facts['architecture'] == 's390x' else 'libblockdev' }}", "vdo" ] }, "ansible_included_var_files": [ "/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Saturday 28 March 2026 19:29:49 -0400 (0:00:00.063) 0:07:24.377 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "not __storage_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Saturday 28 March 2026 19:29:49 -0400 (0:00:00.020) 0:07:24.397 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "not __storage_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Saturday 28 March 2026 19:29:49 -0400 (0:00:00.019) 0:07:24.416 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Saturday 28 March 2026 19:29:49 -0400 (0:00:00.021) 0:07:24.438 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the 
appropriate provider tasks] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Saturday 28 March 2026 19:29:49 -0400 (0:00:00.021) 0:07:24.459 ******** redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node12 TASK [fedora.linux_system_roles.storage : Add repo key] ************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Saturday 28 March 2026 19:29:49 -0400 (0:00:00.053) 0:07:24.512 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_blivet_custom_repo.key is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Add blivet repo] ********************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:15 Saturday 28 March 2026 19:29:49 -0400 (0:00:00.023) 0:07:24.536 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_blivet_custom_repo.key is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:20 Saturday 28 March 2026 19:29:49 -0400 (0:00:00.031) 0:07:24.568 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:27 Saturday 28 March 2026 19:29:49 -0400 (0:00:00.030) 0:07:24.598 ******** ok: [managed-node12] => { "storage_pools | d([])": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": true, "encryption_password": "yabbadabbadoo", "name": "foo", "type": "stratis", "volumes": [ { "mount_point": "/opt/test1", "name": "test1", "size": "4g" } ] } ] } TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:32 Saturday 28 March 2026 19:29:49 -0400 (0:00:00.041) 0:07:24.639 ******** ok: [managed-node12] => { "storage_volumes | d([])": [] } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37 Saturday 28 March 2026 19:29:49 -0400 (0:00:00.038) 0:07:24.678 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:50 Saturday 28 March 2026 19:29:49 -0400 (0:00:00.021) 0:07:24.699 ******** skipping: [managed-node12] => { "changed": false, "false_condition": 
"storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:56 Saturday 28 March 2026 19:29:49 -0400 (0:00:00.070) 0:07:24.770 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:76 Saturday 28 March 2026 19:29:49 -0400 (0:00:00.027) 0:07:24.797 ******** ok: [managed-node12] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "apt-daily.service": { "name": "apt-daily.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": 
"disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fcoe.service": { 
"name": "fcoe.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "iscsi-shutdown.service": { "name": "iscsi-shutdown.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsi.service": { "name": "iscsi.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsid.service": { "name": "iscsid.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-activation-early.service": { "name": "lvm2-activation-early.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": 
"inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_multipath.service": { "name": "modprobe@dm_multipath.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": 
"ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "raid-check.service": { "name": "raid-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "rbdmap.service": { "name": "rbdmap.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, 
"sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-unix-local@.service": { "name": "sshd-unix-local@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd-vsock@.service": { "name": "sshd-vsock@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "stratis-fstab-setup-with-network@.service": { "name": "stratis-fstab-setup-with-network@.service", "source": "systemd", "state": "unknown", "status": "static" }, "stratis-fstab-setup@.service": { "name": "stratis-fstab-setup@.service", "source": "systemd", "state": "unknown", "status": "static" }, "stratisd-min-postinitrd.service": { "name": "stratisd-min-postinitrd.service", "source": "systemd", "state": "inactive", "status": "static" }, "stratisd.service": { "name": "stratisd.service", "source": "systemd", "state": "running", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, 
"systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": 
"systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": 
"systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-userdbd.service": { "name": "systemd-userdbd.service", "source": "systemd", "state": "running", "status": "indirect" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:82 Saturday 28 March 2026 19:29:51 -0400 (0:00:02.025) 0:07:26.823 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:88 Saturday 28 March 2026 19:29:51 -0400 (0:00:00.037) 0:07:26.860 ******** ok: [managed-node12] => { "actions": [], "changed": false, "crypts": [], "leaves": [ "/dev/stratis/foo/test1", "/dev/xvda1", "/dev/xvda2" ], "mounts": [ { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "UUID=73aed7df-5753-4b99-a608-d0d02019ecf4", "state": "mounted" } ], "packages": [ "stratisd", "xfsprogs", "stratis-cli" ], "pools": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": true, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": 
false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "stratis", "volumes": [ { "_device": "/dev/stratis/foo/test1", "_kernel_device": "/dev/dm-7", "_mount_id": "UUID=73aed7df-5753-4b99-a608-d0d02019ecf4", "_raw_device": "/dev/stratis/foo/test1", "_raw_kernel_device": "/dev/dm-7", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:103 Saturday 28 March 2026 19:29:58 -0400 (0:00:07.194) 0:07:34.055 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_udevadm_trigger | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ****** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:110 Saturday 28 March 2026 19:29:59 -0400 (0:00:00.071) 0:07:34.127 ******** ok: [managed-node12] => { "changed": false, "stat": { "atime": 1774740573.5507903, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "db2804575b16f2946d3bf6c98ca11f3291f2e29c", "ctime": 1774740573.548079, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 230686985, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1774740573.548079, "nlink": 1, "path": "/etc/fstab", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1478, "uid": 0, "version": "1679064280", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:115 Saturday 28 March 2026 19:29:59 -0400 (0:00:00.483) 0:07:34.611 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "blivet_output is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:133 Saturday 28 March 2026 19:29:59 -0400 (0:00:00.048) 0:07:34.659 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:139 Saturday 28 March 2026 19:29:59 -0400 (0:00:00.078) 0:07:34.737 ******** ok: [managed-node12] => { "blivet_output": { "actions": [], "changed": false, "crypts": [], "failed": false, "leaves": [ "/dev/stratis/foo/test1", "/dev/xvda1", "/dev/xvda2" ], "mounts": [ { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "UUID=73aed7df-5753-4b99-a608-d0d02019ecf4", "state": "mounted" } ], "packages": [ "stratisd", "xfsprogs", "stratis-cli" ], "pools": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": true, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "stratis", "volumes": [ { "_device": "/dev/stratis/foo/test1", "_kernel_device": "/dev/dm-7", "_mount_id": "UUID=73aed7df-5753-4b99-a608-d0d02019ecf4", "_raw_device": "/dev/stratis/foo/test1", "_raw_kernel_device": "/dev/dm-7", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148 Saturday 28 March 2026 19:29:59 -0400 (0:00:00.075) 0:07:34.814 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": true, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "encryption_tang_thumbprint": null, "encryption_tang_url": null, 
"grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "stratis", "volumes": [ { "_device": "/dev/stratis/foo/test1", "_kernel_device": "/dev/dm-7", "_mount_id": "UUID=73aed7df-5753-4b99-a608-d0d02019ecf4", "_raw_device": "/dev/stratis/foo/test1", "_raw_kernel_device": "/dev/dm-7", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:152 Saturday 28 March 2026 19:29:59 -0400 (0:00:00.062) 0:07:34.877 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] ************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:168 Saturday 28 March 2026 19:29:59 -0400 (0:00:00.089) 0:07:34.966 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:179 Saturday 28 March 2026 19:29:59 -0400 (0:00:00.048) 0:07:35.014 ******** ok: [managed-node12] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Set up new/current mounts] *********** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:184 Saturday 28 March 2026 19:30:00 -0400 (0:00:00.919) 0:07:35.933 ******** redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount ok: [managed-node12] => (item={'src': 'UUID=73aed7df-5753-4b99-a608-d0d02019ecf4', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => { "ansible_loop_var": "mount_info", "backup_file": "", "boot": "yes", "changed": false, "dump": "0", "fstab": "/etc/fstab", "fstype": "xfs", "mount_info": { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "UUID=73aed7df-5753-4b99-a608-d0d02019ecf4", "state": 
"mounted" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "UUID=73aed7df-5753-4b99-a608-d0d02019ecf4" } TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195 Saturday 28 March 2026 19:30:01 -0400 (0:00:00.570) 0:07:36.504 ******** skipping: [managed-node12] => (item={'src': 'UUID=73aed7df-5753-4b99-a608-d0d02019ecf4', 'path': '/opt/test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => { "ansible_loop_var": "mount_info", "changed": false, "false_condition": "mount_info['owner'] != none or mount_info['group'] != none or mount_info['mode'] != none", "mount_info": { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/opt/test1", "src": "UUID=73aed7df-5753-4b99-a608-d0d02019ecf4", "state": "mounted" }, "skip_reason": "Conditional result was False" } skipping: [managed-node12] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:207 Saturday 28 March 2026 19:30:01 -0400 (0:00:00.072) 0:07:36.577 ******** ok: [managed-node12] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:215 Saturday 28 March 2026 19:30:02 -0400 (0:00:00.942) 0:07:37.520 ******** ok: [managed-node12] => { "changed": false, "stat": { "atime": 1774737762.6954868, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1774342704.475, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 4194435, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1774342323.629, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "1224473831", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:220 Saturday 28 March 2026 19:30:02 -0400 (0:00:00.507) 0:07:38.027 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:242 Saturday 28 March 2026 19:30:02 -0400 (0:00:00.025) 0:07:38.053 ******** ok: [managed-node12] TASK [Verify role results - 6] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:168 Saturday 28 March 2026 19:30:05 -0400 
(0:00:02.066) 0:07:40.119 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node12 TASK [Print out pool information] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2 Saturday 28 March 2026 19:30:05 -0400 (0:00:00.091) 0:07:40.211 ******** ok: [managed-node12] => { "_storage_pools_list": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": true, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "stratis", "volumes": [ { "_device": "/dev/stratis/foo/test1", "_kernel_device": "/dev/dm-7", "_mount_id": "UUID=73aed7df-5753-4b99-a608-d0d02019ecf4", "_raw_device": "/dev/stratis/foo/test1", "_raw_kernel_device": "/dev/dm-7", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } ] } ] } TASK [Print out volume information] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7 Saturday 28 March 2026 19:30:05 -0400 (0:00:00.074) 0:07:40.285 ******** skipping: [managed-node12] => { "false_condition": "_storage_volumes_list | length > 0" } TASK [Collect info about the volumes.] 
***************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15 Saturday 28 March 2026 19:30:05 -0400 (0:00:00.037) 0:07:40.322 ******** ok: [managed-node12] => { "changed": false, "info": { "/dev/loop0": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/loop0", "size": "0B", "type": "loop", "uuid": "" }, "/dev/mapper/stratis-1-private-2818607ade064e9c8af91e98a66ed410-crypt": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/mapper/stratis-1-private-2818607ade064e9c8af91e98a66ed410-crypt", "size": "52G", "type": "crypt", "uuid": "" }, "/dev/mapper/stratis-1-private-2818607ade064e9c8af91e98a66ed410-flex-mdv": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/mapper/stratis-1-private-2818607ade064e9c8af91e98a66ed410-flex-mdv", "size": "512M", "type": "stratis", "uuid": "" }, "/dev/mapper/stratis-1-private-2818607ade064e9c8af91e98a66ed410-flex-thindata": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/mapper/stratis-1-private-2818607ade064e9c8af91e98a66ed410-flex-thindata", "size": "50G", "type": "stratis", "uuid": "" }, "/dev/mapper/stratis-1-private-2818607ade064e9c8af91e98a66ed410-flex-thinmeta": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/mapper/stratis-1-private-2818607ade064e9c8af91e98a66ed410-flex-thinmeta", "size": "786M", "type": "stratis", "uuid": "" }, "/dev/mapper/stratis-1-private-2818607ade064e9c8af91e98a66ed410-physical-cache": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/mapper/stratis-1-private-2818607ade064e9c8af91e98a66ed410-physical-cache", "size": "52.1G", "type": "stratis", "uuid": "" }, "/dev/mapper/stratis-1-private-2818607ade064e9c8af91e98a66ed410-physical-originsub": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/mapper/stratis-1-private-2818607ade064e9c8af91e98a66ed410-physical-originsub", "size": "52.1G", "type": "stratis", "uuid": "" }, "/dev/mapper/stratis-1-private-2818607ade064e9c8af91e98a66ed410-thinpool-pool": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/mapper/stratis-1-private-2818607ade064e9c8af91e98a66ed410-thinpool-pool", "size": "50G", "type": "stratis", "uuid": "" }, "/dev/sda": { "fstype": "stratis", "label": "", "mountpoint": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "7d6c178e-9ecc-4871-8591-eb6160c0dfc4" }, "/dev/sdb": { "fstype": "stratis", "label": "", "mountpoint": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "e60c10f9-ecec-4400-afa0-421adab84c38" }, "/dev/sdc": { "fstype": "stratis", "label": "", "mountpoint": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "ce43694f-be78-443b-ab4a-ee11137ae8a7" }, "/dev/sdd": { "fstype": "stratis", "label": "", "mountpoint": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "e36f5949-1179-4998-af32-2efdfe008c8d" }, "/dev/sde": { "fstype": "stratis", "label": "", "mountpoint": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "9bab2aaf-afcf-446e-bee3-441d26ffec5b" }, "/dev/sdf": { "fstype": "stratis", "label": "", "mountpoint": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "c3bb8276-600e-40a4-8da8-571ca56edb00" }, "/dev/sdg": { "fstype": "stratis", "label": "", "mountpoint": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "e57a51f2-445c-4ea2-8313-77ae08fa3917" }, "/dev/sdh": { "fstype": "stratis", "label": "", "mountpoint": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": 
"60dc9f49-23bc-4127-929c-f17a3b3f1d74" }, "/dev/sdi": { "fstype": "stratis", "label": "", "mountpoint": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "390d2998-8451-4939-9793-c6ee49a53091" }, "/dev/stratis/foo/test1": { "fstype": "xfs", "label": "", "mountpoint": "/opt/test1", "name": "/dev/stratis/foo/test1", "size": "4G", "type": "stratis", "uuid": "73aed7df-5753-4b99-a608-d0d02019ecf4" }, "/dev/xvda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda1", "size": "1M", "type": "partition", "uuid": "" }, "/dev/xvda2": { "fstype": "xfs", "label": "", "mountpoint": "/", "name": "/dev/xvda2", "size": "250G", "type": "partition", "uuid": "94fc577e-f1df-44bb-8e86-63b9a68f8f7f" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20 Saturday 28 March 2026 19:30:05 -0400 (0:00:00.518) 0:07:40.841 ******** ok: [managed-node12] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.003015", "end": "2026-03-28 19:30:06.123096", "rc": 0, "start": "2026-03-28 19:30:06.120081" } STDOUT: # system_role:storage # # /etc/fstab # Created by anaconda on Tue Mar 24 08:52:03 2026 # # Accessible filesystems, by reference, are maintained under '/dev/disk/'. # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info. # # After editing this file, run 'systemctl daemon-reload' to update systemd # units generated from this file. # UUID=94fc577e-f1df-44bb-8e86-63b9a68f8f7f / xfs defaults 0 0 ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 UUID=73aed7df-5753-4b99-a608-d0d02019ecf4 /opt/test1 xfs defaults 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25 Saturday 28 March 2026 19:30:06 -0400 (0:00:00.436) 0:07:41.278 ******** ok: [managed-node12] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.003041", "end": "2026-03-28 19:30:06.570016", "failed_when_result": false, "rc": 0, "start": "2026-03-28 19:30:06.566975" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34 Saturday 28 March 2026 19:30:06 -0400 (0:00:00.476) 0:07:41.754 ******** included: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node12 => (item={'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'encryption': True, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER', 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'present', 'type': 'stratis', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'part_type': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=73aed7df-5753-4b99-a608-d0d02019ecf4', '_kernel_device': '/dev/dm-7', '_raw_kernel_device': '/dev/dm-7'}]}) TASK [Set _storage_pool_tests] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5 Saturday 28 March 2026 19:30:06 -0400 (0:00:00.118) 0:07:41.872 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [Get VG shared value status] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18 Saturday 28 March 2026 19:30:06 -0400 (0:00:00.046) 0:07:41.919 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'", "skip_reason": "Conditional result was False" } TASK [Verify that VG shared value checks out] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24 Saturday 28 March 2026 19:30:06 -0400 (0:00:00.069) 0:07:41.988 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'", "skip_reason": "Conditional result was False" } TASK [Verify pool subset] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34 Saturday 28 March 2026 19:30:06 -0400 (0:00:00.066) 0:07:42.055 ******** included: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node12 => (item=members) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node12 => (item=volumes) TASK [Set test variables] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2 Saturday 28 March 2026 19:30:07 -0400 (0:00:00.189) 0:07:42.245 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Get the canonical device path for each member device] ******************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8 Saturday 28 March 2026 19:30:07 -0400 (0:00:00.062) 0:07:42.307 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Set pvs lvm length] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17 Saturday 28 March 2026 19:30:07 -0400 (0:00:00.031) 0:07:42.339 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Set pool pvs] ************************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22 Saturday 28 March 2026 19:30:07 -0400 (0:00:00.040) 0:07:42.379 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Verify PV count] ********************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27 Saturday 28 March 2026 19:30:07 -0400 (0:00:00.037) 0:07:42.417 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Set expected pv type] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36 Saturday 28 March 2026 19:30:07 -0400 (0:00:00.045) 0:07:42.462 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Set expected pv type - 2] ************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41 Saturday 28 March 2026 19:30:07 -0400 (0:00:00.038) 0:07:42.500 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm' and not storage_test_pool.encryption", "skip_reason": "Conditional result was False" } TASK [Set expected pv type - 3] ************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46 Saturday 28 March 2026 19:30:07 -0400 (0:00:00.032) 
0:07:42.533 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:55 Saturday 28 March 2026 19:30:07 -0400 (0:00:00.040) 0:07:42.574 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Check that blivet supports PV grow to fill] ****************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:68 Saturday 28 March 2026 19:30:07 -0400 (0:00:00.053) 0:07:42.627 ******** ok: [managed-node12] => { "changed": false, "failed_when_result": false, "rc": 0 } STDERR: OpenSSH_9.9p1, OpenSSL 3.5.5 27 Jan 2026 debug1: Reading configuration data /root/.ssh/config debug1: Reading configuration data /etc/ssh/ssh_config debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug2: checking match for 'final all' host 10.31.11.27 originally 10.31.11.27 debug2: match not found debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config debug1: configuration requests final Match pass debug2: resolve_canonicalize: hostname 10.31.11.27 is address debug1: re-parsing configuration debug1: Reading configuration data /root/.ssh/config debug1: Reading configuration data /etc/ssh/ssh_config debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug2: checking match for 'final all' host 10.31.11.27 originally 10.31.11.27 debug2: match found debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config debug1: auto-mux: Trying existing master at '/root/.ansible/cp/e1141b92e1' debug2: fd 3 setting O_NONBLOCK debug2: mux_client_hello_exchange: master version 4 debug1: mux_client_request_session: master session id: 2 debug2: Received exit status from master 0 Shared connection to 10.31.11.27 closed. 
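NOTE: The pool and volume facts echoed by the role in this run correspond to a storage_pools definition roughly like the sketch below. This is reconstructed from the logged output, not the literal tests_stratis.yml play; in particular the encryption password here is a hypothetical placeholder, since the real value is masked as VALUE_SPECIFIED_IN_NO_LOG_PARAMETER.

- hosts: managed-node12
  vars:
    storage_pools:
      - name: foo
        type: stratis
        state: present
        disks: [sda, sdb, sdc, sdd, sde, sdf, sdg, sdh, sdi]
        encryption: true
        encryption_password: EXAMPLE_PASSWORD   # placeholder; real value is no_log-protected
        volumes:
          - name: test1
            size: 4g
            fs_type: xfs
            mount_point: /opt/test1
  roles:
    - fedora.linux_system_roles.storage
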
TASK [Verify that PVs fill the whole devices when they should] ***************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:78 Saturday 28 March 2026 19:30:08 -0400 (0:00:00.523) 0:07:43.151 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Check MD RAID] *********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:88 Saturday 28 March 2026 19:30:08 -0400 (0:00:00.067) 0:07:43.219 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node12 TASK [Get information about RAID] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8 Saturday 28 March 2026 19:30:08 -0400 (0:00:00.157) 0:07:43.376 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14 Saturday 28 March 2026 19:30:08 -0400 (0:00:00.058) 0:07:43.435 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19 Saturday 28 March 2026 19:30:08 -0400 (0:00:00.078) 0:07:43.513 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24 Saturday 28 March 2026 19:30:08 -0400 (0:00:00.052) 0:07:43.566 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set md chunk size regex] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29 Saturday 28 March 2026 19:30:08 -0400 (0:00:00.047) 0:07:43.613 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37 Saturday 28 March 2026 19:30:08 -0400 (0:00:00.063) 0:07:43.677 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46 Saturday 28 March 2026 19:30:08 
-0400 (0:00:00.064) 0:07:43.741 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55 Saturday 28 March 2026 19:30:08 -0400 (0:00:00.057) 0:07:43.799 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64 Saturday 28 March 2026 19:30:08 -0400 (0:00:00.074) 0:07:43.873 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74 Saturday 28 March 2026 19:30:08 -0400 (0:00:00.044) 0:07:43.917 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83 Saturday 28 March 2026 19:30:08 -0400 (0:00:00.038) 0:07:43.956 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_chunk_size_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:91 Saturday 28 March 2026 19:30:08 -0400 (0:00:00.054) 0:07:44.011 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node12 TASK [Validate pool member LVM RAID settings] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2 Saturday 28 March 2026 19:30:09 -0400 (0:00:00.100) 0:07:44.111 ******** skipping: [managed-node12] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'part_type': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 
'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=73aed7df-5753-4b99-a608-d0d02019ecf4', '_kernel_device': '/dev/dm-7', '_raw_kernel_device': '/dev/dm-7'}) => { "ansible_loop_var": "storage_test_lvmraid_volume", "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False", "storage_test_lvmraid_volume": { "_device": "/dev/stratis/foo/test1", "_kernel_device": "/dev/dm-7", "_mount_id": "UUID=73aed7df-5753-4b99-a608-d0d02019ecf4", "_raw_device": "/dev/stratis/foo/test1", "_raw_kernel_device": "/dev/dm-7", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } } skipping: [managed-node12] => { "changed": false } MSG: All items skipped TASK [Check Thin Pools] ******************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:94 Saturday 28 March 2026 19:30:09 -0400 (0:00:00.048) 0:07:44.159 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node12 TASK [Validate pool member thinpool settings] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2 Saturday 28 March 2026 19:30:09 -0400 (0:00:00.091) 0:07:44.250 ******** skipping: [managed-node12] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'part_type': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': 
'/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=73aed7df-5753-4b99-a608-d0d02019ecf4', '_kernel_device': '/dev/dm-7', '_raw_kernel_device': '/dev/dm-7'}) => { "ansible_loop_var": "storage_test_thin_volume", "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False", "storage_test_thin_volume": { "_device": "/dev/stratis/foo/test1", "_kernel_device": "/dev/dm-7", "_mount_id": "UUID=73aed7df-5753-4b99-a608-d0d02019ecf4", "_raw_device": "/dev/stratis/foo/test1", "_raw_kernel_device": "/dev/dm-7", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } } skipping: [managed-node12] => { "changed": false } MSG: All items skipped TASK [Check member encryption] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:97 Saturday 28 March 2026 19:30:09 -0400 (0:00:00.066) 0:07:44.317 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node12 TASK [Set test variables] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5 Saturday 28 March 2026 19:30:09 -0400 (0:00:00.124) 0:07:44.442 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "1", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10 Saturday 28 March 2026 19:30:09 -0400 (0:00:00.080) 0:07:44.522 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Validate pool member crypttab entries] *********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17 Saturday 28 March 2026 19:30:09 -0400 (0:00:00.044) 0:07:44.566 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24 Saturday 28 March 2026 19:30:09 -0400 (0:00:00.042) 
0:07:44.608 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:100 Saturday 28 March 2026 19:30:09 -0400 (0:00:00.048) 0:07:44.657 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node12 TASK [Validate pool member VDO settings] *************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2 Saturday 28 March 2026 19:30:09 -0400 (0:00:00.091) 0:07:44.749 ******** skipping: [managed-node12] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'part_type': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=73aed7df-5753-4b99-a608-d0d02019ecf4', '_kernel_device': '/dev/dm-7', '_raw_kernel_device': '/dev/dm-7'}) => { "ansible_loop_var": "storage_test_vdo_volume", "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False", "storage_test_vdo_volume": { "_device": "/dev/stratis/foo/test1", "_kernel_device": "/dev/dm-7", "_mount_id": "UUID=73aed7df-5753-4b99-a608-d0d02019ecf4", "_raw_device": "/dev/stratis/foo/test1", "_raw_kernel_device": "/dev/dm-7", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } } skipping: [managed-node12] => { "changed": false } MSG: All items skipped 
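
The LVM RAID, thin pool, and VDO member checks above all report "All items skipped" for the same reason: each one loops over the pool's volumes and is guarded by `storage_test_pool.type == 'lvm'`, which is false for this Stratis pool. A minimal, self-contained sketch of that pattern follows; the loop variable name and the when-condition are taken from the log, while the assert body and the variable values are only illustrative, not the role's actual checks.

```yaml
---
# Sketch of the per-volume skip pattern seen in the member checks above.
# Assumptions: the assert conditions and the vars below are placeholders;
# only loop_var and the when-condition match the log output.
- hosts: localhost
  gather_facts: false
  vars:
    storage_test_pool:
      type: stratis
      volumes:
        - name: test1
          compression: null
          deduplication: null
  tasks:
    - name: Validate pool member VDO settings
      ansible.builtin.assert:
        that:
          - storage_test_vdo_volume.compression is none
          - storage_test_vdo_volume.deduplication is none
      loop: "{{ storage_test_pool.volumes }}"
      loop_control:
        loop_var: storage_test_vdo_volume
      when: storage_test_pool.type == 'lvm'   # false for type 'stratis', so every item is skipped
```
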
TASK [Check Stratis] *********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:103 Saturday 28 March 2026 19:30:09 -0400 (0:00:00.064) 0:07:44.813 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node12 TASK [Get stratis pool information] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6 Saturday 28 March 2026 19:30:09 -0400 (0:00:00.101) 0:07:44.914 ******** ok: [managed-node12] => { "changed": false, "rc": 0 } STDOUT: {"name": "foo", "encrypted": true, "key_desc": "blivet-foo", "clevis_pin": null, "clevis_args": {}} STDERR: OpenSSH_9.9p1, OpenSSL 3.5.5 27 Jan 2026 debug1: Reading configuration data /root/.ssh/config debug1: Reading configuration data /etc/ssh/ssh_config debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug2: checking match for 'final all' host 10.31.11.27 originally 10.31.11.27 debug2: match not found debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config debug1: configuration requests final Match pass debug2: resolve_canonicalize: hostname 10.31.11.27 is address debug1: re-parsing configuration debug1: Reading configuration data /root/.ssh/config debug1: Reading configuration data /etc/ssh/ssh_config debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug2: checking match for 'final all' host 10.31.11.27 originally 10.31.11.27 debug2: match found debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config debug1: auto-mux: Trying existing master at '/root/.ansible/cp/e1141b92e1' debug2: fd 3 setting O_NONBLOCK debug2: mux_client_hello_exchange: master version 4 debug1: mux_client_request_session: master session id: 2 debug2: Received exit status from master 0 Shared connection to 10.31.11.27 closed. 
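
The JSON on STDOUT above is the pool report that the next few tasks consume: it is printed, stored as the `_stratis_pool_info` fact, and then checked with assertions on the pool name and encryption state. Below is a minimal sketch of that flow, assuming the set_fact/assert structure and the specific conditions from the task names and results in the log; only the report values themselves are copied from the captured STDOUT.

```yaml
---
# Sketch of the verification flow that follows this task.
# The report string is the STDOUT captured above; the fact name is taken from the
# log, while the assertion conditions are assumptions based on the task results.
- hosts: localhost
  gather_facts: false
  vars:
    storage_test_stratis_report_stdout: '{"name": "foo", "encrypted": true, "key_desc": "blivet-foo", "clevis_pin": null, "clevis_args": {}}'
  tasks:
    - name: Get information about Stratis
      ansible.builtin.set_fact:
        _stratis_pool_info: "{{ storage_test_stratis_report_stdout | from_json }}"

    - name: Verify that the pool was created
      ansible.builtin.assert:
        that:
          - _stratis_pool_info.name == 'foo'

    - name: Verify that encryption is correctly set
      ansible.builtin.assert:
        that:
          - _stratis_pool_info.encrypted | bool
          - _stratis_pool_info.key_desc == 'blivet-foo'
```
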
TASK [Print script output] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15 Saturday 28 March 2026 19:30:10 -0400 (0:00:00.553) 0:07:45.468 ******** ok: [managed-node12] => {} MSG: {'name': 'foo', 'encrypted': True, 'key_desc': 'blivet-foo', 'clevis_pin': None, 'clevis_args': {}} TASK [Get information about Stratis] ******************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:19 Saturday 28 March 2026 19:30:10 -0400 (0:00:00.052) 0:07:45.520 ******** ok: [managed-node12] => { "ansible_facts": { "_stratis_pool_info": { "clevis_args": {}, "clevis_pin": null, "encrypted": true, "key_desc": "blivet-foo", "name": "foo" } }, "changed": false } TASK [Verify that the pools was created] *************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:23 Saturday 28 March 2026 19:30:10 -0400 (0:00:00.064) 0:07:45.585 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Verify that encryption is correctly set] ********************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:30 Saturday 28 March 2026 19:30:10 -0400 (0:00:00.067) 0:07:45.652 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Verify that Clevis/Tang encryption is correctly set] ********************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:39 Saturday 28 March 2026 19:30:10 -0400 (0:00:00.056) 0:07:45.709 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.encryption_clevis_pin == 'tang'", "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:49 Saturday 28 March 2026 19:30:10 -0400 (0:00:00.045) 0:07:45.754 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_stratis_report": null }, "changed": false } TASK [Clean up test variables] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:106 Saturday 28 March 2026 19:30:10 -0400 (0:00:00.038) 0:07:45.792 ******** ok: [managed-node12] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Verify the volumes] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3 Saturday 28 March 2026 19:30:10 -0400 (0:00:00.044) 0:07:45.837 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node12 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 
'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'present', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'part_type': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=73aed7df-5753-4b99-a608-d0d02019ecf4', '_kernel_device': '/dev/dm-7', '_raw_kernel_device': '/dev/dm-7'}) TASK [Set storage volume test variables] *************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2 Saturday 28 March 2026 19:30:10 -0400 (0:00:00.056) 0:07:45.893 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_volume_present": true, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [Run test verify for storage_test_volume_subset] ************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19 Saturday 28 March 2026 19:30:10 -0400 (0:00:00.048) 0:07:45.941 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node12 => (item=mount) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node12 => (item=fstab) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node12 => (item=fs) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node12 => (item=device) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node12 => (item=encryption) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node12 => (item=md) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node12 => (item=size) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node12 => (item=cache) TASK [Get expected mount device based on device type] ************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7 Saturday 28 March 2026 19:30:11 -0400 (0:00:00.383) 0:07:46.325 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_device_path": "/dev/stratis/foo/test1" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11 Saturday 28 March 
2026 19:30:11 -0400 (0:00:00.062) 0:07:46.388 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_mount_expected_mount_point": "/opt/test1", "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Get information about the mountpoint directory] ************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19 Saturday 28 March 2026 19:30:11 -0400 (0:00:00.072) 0:07:46.460 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "(not storage_test_volume.mount_user is none and storage_test_volume.mount_user | length > 0) or (not storage_test_volume.mount_group is none and storage_test_volume.mount_group | length > 0) or (not storage_test_volume.mount_mode is none and storage_test_volume.mount_mode | length > 0)", "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by device] ******************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:32 Saturday 28 March 2026 19:30:11 -0400 (0:00:00.062) 0:07:46.523 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Verify mount directory user] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:40 Saturday 28 March 2026 19:30:11 -0400 (0:00:00.058) 0:07:46.581 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "not storage_test_volume.mount_user is none", "skip_reason": "Conditional result was False" } TASK [Verify mount directory group] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:51 Saturday 28 March 2026 19:30:11 -0400 (0:00:00.056) 0:07:46.638 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "not storage_test_volume.mount_group is none", "skip_reason": "Conditional result was False" } TASK [Verify mount directory permissions] ************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:62 Saturday 28 March 2026 19:30:11 -0400 (0:00:00.152) 0:07:46.790 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "not storage_test_volume.mount_mode is none", "skip_reason": "Conditional result was False" } TASK [Get path of test volume device] ****************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:76 Saturday 28 March 2026 19:30:11 -0400 (0:00:00.067) 0:07:46.858 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:82 Saturday 28 March 2026 19:30:11 -0400 (0:00:00.055) 0:07:46.913 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:88 Saturday 28 March 2026 19:30:11 -0400 (0:00:00.056) 0:07:46.970 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Unset facts] ************************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:98 Saturday 28 March 2026 19:30:11 -0400 (0:00:00.045) 0:07:47.016 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_found_mount_stat": null, "storage_test_mount_expected_mount_point": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2 Saturday 28 March 2026 19:30:11 -0400 (0:00:00.048) 0:07:47.064 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "1", "storage_test_fstab_expected_mount_options_matches": "1", "storage_test_fstab_expected_mount_point_matches": "1", "storage_test_fstab_id_matches": [ "UUID=73aed7df-5753-4b99-a608-d0d02019ecf4 " ], "storage_test_fstab_mount_options_matches": [ " /opt/test1 xfs defaults " ], "storage_test_fstab_mount_point_matches": [ " /opt/test1 " ] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17 Saturday 28 March 2026 19:30:12 -0400 (0:00:00.091) 0:07:47.156 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Verify the fstab mount point] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24 Saturday 28 March 2026 19:30:12 -0400 (0:00:00.038) 0:07:47.195 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33 Saturday 28 March 2026 19:30:12 -0400 (0:00:00.030) 0:07:47.225 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "__storage_verify_mount_options | d(false)", "skip_reason": "Conditional result was False" } TASK [Verify fingerprint] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45 Saturday 28 March 2026 19:30:12 -0400 (0:00:00.026) 0:07:47.252 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Clean up variables] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52 Saturday 28 March 2026 19:30:12 -0400 (0:00:00.028) 0:07:47.281 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, 
"storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6 Saturday 28 March 2026 19:30:12 -0400 (0:00:00.030) 0:07:47.311 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type != \"stratis\"", "skip_reason": "Conditional result was False" } TASK [Verify fs label] ********************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14 Saturday 28 March 2026 19:30:12 -0400 (0:00:00.025) 0:07:47.337 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type != \"stratis\"", "skip_reason": "Conditional result was False" } TASK [See whether the device node is present] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3 Saturday 28 March 2026 19:30:12 -0400 (0:00:00.020) 0:07:47.357 ******** ok: [managed-node12] => { "changed": false, "stat": { "atime": 1774740571.1487584, "attr_flags": "", "attributes": [], "block_size": 512, "blocks": 0, "charset": "binary", "ctime": 1774740571.1487584, "dev": 6, "device_type": 64775, "executable": false, "exists": true, "gid": 6, "gr_name": "disk", "inode": 1632, "isblk": true, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/symlink", "mode": "0660", "mtime": 1774740571.1487584, "nlink": 1, "path": "/dev/stratis/foo/test1", "pw_name": "root", "readable": true, "rgrp": true, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": null, "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9 Saturday 28 March 2026 19:30:12 -0400 (0:00:00.398) 0:07:47.756 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Verify the presence/absence of the device node - 2] ********************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16 Saturday 28 March 2026 19:30:12 -0400 (0:00:00.062) 0:07:47.818 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "not (_storage_test_volume_present or storage_test_volume.type == 'disk')", "skip_reason": "Conditional result was False" } TASK [Make sure we got info about this volume] ********************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23 Saturday 28 March 2026 19:30:12 -0400 (0:00:00.041) 0:07:47.860 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Process volume type (set initial value) (1/2)] *************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29 
Saturday 28 March 2026 19:30:12 -0400 (0:00:00.053) 0:07:47.914 ******** ok: [managed-node12] => { "ansible_facts": { "st_volume_type": "stratis" }, "changed": false } TASK [Process volume type (get RAID value) (2/2)] ****************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33 Saturday 28 March 2026 19:30:12 -0400 (0:00:00.032) 0:07:47.947 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == \"raid\"", "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38 Saturday 28 March 2026 19:30:12 -0400 (0:00:00.027) 0:07:47.974 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Stat the LUKS device, if encrypted] ************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3 Saturday 28 March 2026 19:30:12 -0400 (0:00:00.035) 0:07:48.010 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10 Saturday 28 March 2026 19:30:12 -0400 (0:00:00.020) 0:07:48.031 ******** ok: [managed-node12] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: cryptsetup TASK [Collect LUKS info for this volume] *************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16 Saturday 28 March 2026 19:30:13 -0400 (0:00:00.813) 0:07:48.844 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.encryption and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22 Saturday 28 March 2026 19:30:13 -0400 (0:00:00.036) 0:07:48.881 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29 Saturday 28 March 2026 19:30:13 -0400 (0:00:00.035) 0:07:48.917 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40 Saturday 28 March 2026 19:30:13 -0400 (0:00:00.051) 0:07:48.968 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if 
encrypted] *********************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46 Saturday 28 March 2026 19:30:13 -0400 (0:00:00.052) 0:07:49.020 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51 Saturday 28 March 2026 19:30:13 -0400 (0:00:00.048) 0:07:49.069 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:64 Saturday 28 March 2026 19:30:14 -0400 (0:00:00.057) 0:07:49.127 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:77 Saturday 28 March 2026 19:30:14 -0400 (0:00:00.032) 0:07:49.159 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Set test variables] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:90 Saturday 28 March 2026 19:30:14 -0400 (0:00:00.036) 0:07:49.196 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:96 Saturday 28 March 2026 19:30:14 -0400 (0:00:00.073) 0:07:49.270 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:103 Saturday 28 March 2026 19:30:14 -0400 (0:00:00.056) 0:07:49.326 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:111 Saturday 28 March 2026 19:30:14 -0400 (0:00:00.039) 0:07:49.365 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** 
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:119 Saturday 28 March 2026 19:30:14 -0400 (0:00:00.049) 0:07:49.414 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:127 Saturday 28 March 2026 19:30:14 -0400 (0:00:00.043) 0:07:49.458 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [Get information about RAID] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8 Saturday 28 March 2026 19:30:14 -0400 (0:00:00.059) 0:07:49.518 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14 Saturday 28 March 2026 19:30:14 -0400 (0:00:00.037) 0:07:49.556 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19 Saturday 28 March 2026 19:30:14 -0400 (0:00:00.033) 0:07:49.589 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24 Saturday 28 March 2026 19:30:14 -0400 (0:00:00.032) 0:07:49.622 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set chunk size regex] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29 Saturday 28 March 2026 19:30:14 -0400 (0:00:00.021) 0:07:49.643 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37 Saturday 28 March 2026 19:30:14 -0400 (0:00:00.022) 0:07:49.665 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46 Saturday 28 March 2026 19:30:14 -0400 (0:00:00.025) 0:07:49.691 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54 Saturday 28 March 2026 19:30:14 -0400 (0:00:00.020) 0:07:49.711 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62 Saturday 28 March 2026 19:30:14 -0400 (0:00:00.025) 0:07:49.737 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70 Saturday 28 March 2026 19:30:14 -0400 (0:00:00.028) 0:07:49.765 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Parse the actual size of the volume] ************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3 Saturday 28 March 2026 19:30:14 -0400 (0:00:00.019) 0:07:49.785 ******** ok: [managed-node12] => { "bytes": 4294967296, "changed": false, "lvm": "4g", "parted": "4GiB", "size": "4 GiB" } TASK [Parse the requested size of the volume] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11 Saturday 28 March 2026 19:30:15 -0400 (0:00:00.386) 0:07:50.171 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == \"lvm\"", "skip_reason": "Conditional result was False" } TASK [Establish base value for expected size] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20 Saturday 28 March 2026 19:30:15 -0400 (0:00:00.037) 0:07:50.208 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == \"lvm\"", "skip_reason": "Conditional result was False" } TASK [Show expected size] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28 Saturday 28 March 2026 19:30:15 -0400 (0:00:00.048) 0:07:50.257 ******** ok: [managed-node12] => { "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined" } TASK [Get the size of parent/pool device] ************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32 Saturday 28 March 2026 19:30:15 -0400 (0:00:00.052) 
0:07:50.309 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == \"lvm\"", "skip_reason": "Conditional result was False" } TASK [Show test pool] ********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46 Saturday 28 March 2026 19:30:15 -0400 (0:00:00.059) 0:07:50.368 ******** skipping: [managed-node12] => { "false_condition": "storage_test_volume.type == \"lvm\"" } TASK [Show test blockinfo] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50 Saturday 28 March 2026 19:30:15 -0400 (0:00:00.052) 0:07:50.421 ******** skipping: [managed-node12] => { "false_condition": "storage_test_volume.type == \"lvm\"" } TASK [Show test pool size] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54 Saturday 28 March 2026 19:30:15 -0400 (0:00:00.036) 0:07:50.458 ******** skipping: [managed-node12] => { "false_condition": "storage_test_volume.type == \"lvm\"" } TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58 Saturday 28 March 2026 19:30:15 -0400 (0:00:00.042) 0:07:50.500 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == \"lvm\"", "skip_reason": "Conditional result was False" } TASK [Default thin pool reserved space values] ********************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:68 Saturday 28 March 2026 19:30:15 -0400 (0:00:00.060) 0:07:50.561 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Default minimal thin pool reserved space size] *************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:72 Saturday 28 March 2026 19:30:15 -0400 (0:00:00.049) 0:07:50.610 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Default maximal thin pool reserved space size] *************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:77 Saturday 28 March 2026 19:30:15 -0400 (0:00:00.034) 0:07:50.645 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Calculate maximum usable space in thin pool] ***************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:83 Saturday 28 March 2026 19:30:15 -0400 (0:00:00.035) 0:07:50.681 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Apply upper size limit to max usable thin pool space] ******************** task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:87 Saturday 28 March 2026 19:30:15 -0400 (0:00:00.038) 0:07:50.720 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Apply lower size limit to max usable thin pool space] ******************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:92 Saturday 28 March 2026 19:30:15 -0400 (0:00:00.036) 0:07:50.756 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Convert maximum usable thin pool space from int to Size] ***************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:97 Saturday 28 March 2026 19:30:15 -0400 (0:00:00.035) 0:07:50.792 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Show max thin pool size] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:102 Saturday 28 March 2026 19:30:15 -0400 (0:00:00.046) 0:07:50.839 ******** skipping: [managed-node12] => { "false_condition": "storage_test_volume.thin | bool" } TASK [Show volume thin pool size] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:106 Saturday 28 March 2026 19:30:15 -0400 (0:00:00.052) 0:07:50.891 ******** skipping: [managed-node12] => { "false_condition": "storage_test_volume.thin | bool" } TASK [Show test volume size] *************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:110 Saturday 28 March 2026 19:30:15 -0400 (0:00:00.043) 0:07:50.934 ******** skipping: [managed-node12] => { "false_condition": "storage_test_volume.thin | bool" } TASK [Establish base value for expected thin pool size] ************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:114 Saturday 28 March 2026 19:30:15 -0400 (0:00:00.054) 0:07:50.989 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Calculate the expected size based on pool size and percentage value - 2] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:121 Saturday 28 March 2026 19:30:15 -0400 (0:00:00.052) 0:07:51.041 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Establish base value for expected thin pool volume size] ***************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:128 Saturday 28 March 2026 19:30:15 -0400 (0:00:00.044) 0:07:51.086 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": 
"Conditional result was False" } TASK [Calculate the expected thin pool volume size based on percentage value] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:132 Saturday 28 March 2026 19:30:16 -0400 (0:00:00.060) 0:07:51.147 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Replace expected volume size with calculated value] ********************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:138 Saturday 28 March 2026 19:30:16 -0400 (0:00:00.039) 0:07:51.187 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Show actual size] ******************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:144 Saturday 28 March 2026 19:30:16 -0400 (0:00:00.037) 0:07:51.224 ******** ok: [managed-node12] => { "storage_test_actual_size": { "bytes": 4294967296, "changed": false, "failed": false, "lvm": "4g", "parted": "4GiB", "size": "4 GiB" } } TASK [Show expected size - 2] ************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:148 Saturday 28 March 2026 19:30:16 -0400 (0:00:00.041) 0:07:51.266 ******** ok: [managed-node12] => { "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined" } TASK [Assert expected size is actual size] ************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:152 Saturday 28 March 2026 19:30:16 -0400 (0:00:00.040) 0:07:51.306 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == \"lvm\"", "skip_reason": "Conditional result was False" } TASK [Get information about the LV] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5 Saturday 28 March 2026 19:30:16 -0400 (0:00:00.036) 0:07:51.342 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13 Saturday 28 March 2026 19:30:16 -0400 (0:00:00.040) 0:07:51.383 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Check segment type] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17 Saturday 28 March 2026 19:30:16 -0400 (0:00:00.042) 0:07:51.425 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional 
result was False" } TASK [Set LV cache size] ******************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24 Saturday 28 March 2026 19:30:16 -0400 (0:00:00.058) 0:07:51.484 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Parse the requested cache size] ****************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31 Saturday 28 March 2026 19:30:16 -0400 (0:00:00.046) 0:07:51.530 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Set expected cache size] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37 Saturday 28 March 2026 19:30:16 -0400 (0:00:00.073) 0:07:51.603 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42 Saturday 28 March 2026 19:30:16 -0400 (0:00:00.048) 0:07:51.651 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25 Saturday 28 March 2026 19:30:16 -0400 (0:00:00.055) 0:07:51.707 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:43 Saturday 28 March 2026 19:30:16 -0400 (0:00:00.065) 0:07:51.773 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Clean up variable namespace] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:52 Saturday 28 March 2026 19:30:16 -0400 (0:00:00.041) 0:07:51.815 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Clean up - 2] ************************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:173 Saturday 28 March 2026 19:30:16 -0400 (0:00:00.038) 0:07:51.853 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml for managed-node12 TASK [Clear facts] ************************************************************* task 
path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml:10 Saturday 28 March 2026 19:30:16 -0400 (0:00:00.057) 0:07:51.910 ******** META: facts cleared TASK [Run the role] ************************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml:24 Saturday 28 March 2026 19:30:16 -0400 (0:00:00.011) 0:07:51.922 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "__sr_failed_when is defined", "skip_reason": "Conditional result was False" } TASK [Run the role normally] *************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml:34 Saturday 28 March 2026 19:30:16 -0400 (0:00:00.033) 0:07:51.955 ******** included: fedora.linux_system_roles.storage for managed-node12 TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Saturday 28 March 2026 19:30:16 -0400 (0:00:00.042) 0:07:51.997 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node12 TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Saturday 28 March 2026 19:30:16 -0400 (0:00:00.026) 0:07:52.023 ******** ok: [managed-node12] TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Saturday 28 March 2026 19:30:17 -0400 (0:00:01.072) 0:07:53.096 ******** skipping: [managed-node12] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node12] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node12] => (item=CentOS_10.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-fs", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "xfsprogs", "stratisd", "stratis-cli", "{{ 'libblockdev-s390' if ansible_facts['architecture'] == 's390x' else 'libblockdev' }}", "vdo" ] }, "ansible_included_var_files": [ "/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node12] => (item=CentOS_10.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-fs", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "xfsprogs", "stratisd", "stratis-cli", "{{ 'libblockdev-s390' if ansible_facts['architecture'] == 's390x' else 'libblockdev' }}", "vdo" ] }, "ansible_included_var_files": [ "/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK 
[fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Saturday 28 March 2026 19:30:18 -0400 (0:00:00.181) 0:07:53.277 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "not __storage_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Saturday 28 March 2026 19:30:18 -0400 (0:00:00.045) 0:07:53.323 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "not __storage_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Saturday 28 March 2026 19:30:18 -0400 (0:00:00.046) 0:07:53.369 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Saturday 28 March 2026 19:30:18 -0400 (0:00:00.053) 0:07:53.423 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Saturday 28 March 2026 19:30:18 -0400 (0:00:00.040) 0:07:53.463 ******** redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node12 TASK [fedora.linux_system_roles.storage : Add repo key] ************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Saturday 28 March 2026 19:30:18 -0400 (0:00:00.058) 0:07:53.521 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_blivet_custom_repo.key is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Add blivet repo] ********************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:15 Saturday 28 March 2026 19:30:18 -0400 (0:00:00.019) 0:07:53.540 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_blivet_custom_repo.key is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:20 Saturday 28 March 2026 19:30:18 -0400 (0:00:00.022) 0:07:53.563 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** 
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:27 Saturday 28 March 2026 19:30:18 -0400 (0:00:00.028) 0:07:53.592 ******** ok: [managed-node12] => { "storage_pools | d([])": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "name": "foo", "state": "absent", "type": "stratis", "volumes": [ { "mount_point": "/opt/test1", "name": "test1", "size": "4g", "state": "absent" } ] } ] } TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:32 Saturday 28 March 2026 19:30:18 -0400 (0:00:00.032) 0:07:53.625 ******** ok: [managed-node12] => { "storage_volumes | d([])": [] } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37 Saturday 28 March 2026 19:30:18 -0400 (0:00:00.032) 0:07:53.657 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:50 Saturday 28 March 2026 19:30:18 -0400 (0:00:00.020) 0:07:53.677 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:56 Saturday 28 March 2026 19:30:18 -0400 (0:00:00.020) 0:07:53.698 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:76 Saturday 28 March 2026 19:30:18 -0400 (0:00:00.019) 0:07:53.718 ******** ok: [managed-node12] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "apt-daily.service": { "name": "apt-daily.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", 
"state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": 
"stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fcoe.service": { "name": "fcoe.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": 
"systemd", "state": "running", "status": "enabled" }, "iscsi-shutdown.service": { "name": "iscsi-shutdown.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsi.service": { "name": "iscsi.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsid.service": { "name": "iscsid.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-activation-early.service": { "name": "lvm2-activation-early.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_multipath.service": { "name": "modprobe@dm_multipath.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", 
"status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "raid-check.service": { "name": "raid-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "rbdmap.service": { "name": "rbdmap.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": 
"stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-unix-local@.service": { "name": "sshd-unix-local@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd-vsock@.service": { "name": "sshd-vsock@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "stratis-fstab-setup-with-network@.service": { "name": 
"stratis-fstab-setup-with-network@.service", "source": "systemd", "state": "unknown", "status": "static" }, "stratis-fstab-setup@.service": { "name": "stratis-fstab-setup@.service", "source": "systemd", "state": "unknown", "status": "static" }, "stratisd-min-postinitrd.service": { "name": "stratisd-min-postinitrd.service", "source": "systemd", "state": "inactive", "status": "static" }, "stratisd.service": { "name": "stratisd.service", "source": "systemd", "state": "running", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", 
"status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": 
"systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, 
"systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-userdbd.service": { "name": "systemd-userdbd.service", "source": "systemd", "state": "running", "status": "indirect" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", 
"source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:82 Saturday 28 March 2026 19:30:20 -0400 (0:00:02.035) 0:07:55.753 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:88 Saturday 28 March 2026 19:30:20 -0400 (0:00:00.038) 0:07:55.792 ******** changed: [managed-node12] => { "actions": [ { "action": "destroy format", "device": "/dev/stratis/foo/test1", "fs_type": "stratis xfs" }, { "action": "destroy device", "device": "/dev/stratis/foo/test1", "fs_type": null }, { "action": "destroy device", "device": "/dev/stratis/foo", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdb", "fs_type": "stratis" }, { "action": "destroy format", "device": "/dev/sdi", "fs_type": "stratis" }, { "action": "destroy format", "device": "/dev/sdd", "fs_type": "stratis" }, { "action": "destroy format", "device": "/dev/sdg", "fs_type": "stratis" }, { "action": "destroy format", "device": "/dev/sde", "fs_type": "stratis" }, { "action": "destroy format", "device": "/dev/sdc", "fs_type": "stratis" }, { "action": "destroy format", "device": "/dev/sda", "fs_type": "stratis" }, { "action": "destroy format", "device": "/dev/sdf", "fs_type": "stratis" }, { "action": "destroy format", "device": "/dev/sdh", "fs_type": "stratis" } ], "changed": true, "crypts": [], "leaves": [ "/dev/sda", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/xvda2" ], "mounts": [ { "fstype": "xfs", "path": "/opt/test1", "src": "UUID=73aed7df-5753-4b99-a608-d0d02019ecf4", "state": "absent" } ], "packages": [ "xfsprogs" ], "pools": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": true, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "stratis", "volumes": [ { "_device": "/dev/stratis/foo/test1", "_mount_id": "UUID=73aed7df-5753-4b99-a608-d0d02019ecf4", "_raw_device": "/dev/stratis/foo/test1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": 
null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:103 Saturday 28 March 2026 19:30:39 -0400 (0:00:18.938) 0:08:14.731 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_udevadm_trigger | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ****** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:110 Saturday 28 March 2026 19:30:39 -0400 (0:00:00.042) 0:08:14.773 ******** ok: [managed-node12] => { "changed": false, "stat": { "atime": 1774740573.5507903, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "db2804575b16f2946d3bf6c98ca11f3291f2e29c", "ctime": 1774740573.548079, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 230686985, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1774740573.548079, "nlink": 1, "path": "/etc/fstab", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1478, "uid": 0, "version": "1679064280", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:115 Saturday 28 March 2026 19:30:40 -0400 (0:00:00.419) 0:08:15.193 ******** ok: [managed-node12] => { "backup": "", "changed": false } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:133 Saturday 28 March 2026 19:30:40 -0400 (0:00:00.422) 0:08:15.615 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:139 Saturday 28 March 2026 19:30:40 -0400 (0:00:00.077) 0:08:15.693 ******** ok: [managed-node12] => { "blivet_output": { "actions": [ { "action": "destroy format", "device": "/dev/stratis/foo/test1", "fs_type": "stratis xfs" }, { "action": "destroy device", "device": "/dev/stratis/foo/test1", "fs_type": null }, { "action": "destroy device", "device": "/dev/stratis/foo", "fs_type": null }, { "action": "destroy format", "device": "/dev/sdb", "fs_type": "stratis" }, { "action": "destroy format", "device": "/dev/sdi", "fs_type": "stratis" }, { "action": "destroy format", "device": "/dev/sdd", "fs_type": "stratis" }, { "action": "destroy format", "device": "/dev/sdg", "fs_type": "stratis" }, { "action": "destroy format", "device": "/dev/sde", "fs_type": "stratis" }, { "action": "destroy format", "device": "/dev/sdc", 
"fs_type": "stratis" }, { "action": "destroy format", "device": "/dev/sda", "fs_type": "stratis" }, { "action": "destroy format", "device": "/dev/sdf", "fs_type": "stratis" }, { "action": "destroy format", "device": "/dev/sdh", "fs_type": "stratis" } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/sda", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/xvda2" ], "mounts": [ { "fstype": "xfs", "path": "/opt/test1", "src": "UUID=73aed7df-5753-4b99-a608-d0d02019ecf4", "state": "absent" } ], "packages": [ "xfsprogs" ], "pools": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": true, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "stratis", "volumes": [ { "_device": "/dev/stratis/foo/test1", "_mount_id": "UUID=73aed7df-5753-4b99-a608-d0d02019ecf4", "_raw_device": "/dev/stratis/foo/test1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148 Saturday 28 March 2026 19:30:40 -0400 (0:00:00.070) 0:08:15.764 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": true, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "stratis", "volumes": [ { "_device": "/dev/stratis/foo/test1", "_mount_id": "UUID=73aed7df-5753-4b99-a608-d0d02019ecf4", "_raw_device": "/dev/stratis/foo/test1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc", 
"sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:152 Saturday 28 March 2026 19:30:40 -0400 (0:00:00.055) 0:08:15.819 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] ************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:168 Saturday 28 March 2026 19:30:40 -0400 (0:00:00.044) 0:08:15.864 ******** redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount changed: [managed-node12] => (item={'src': 'UUID=73aed7df-5753-4b99-a608-d0d02019ecf4', 'path': '/opt/test1', 'state': 'absent', 'fstype': 'xfs'}) => { "ansible_loop_var": "mount_info", "backup_file": "", "boot": "yes", "changed": true, "dump": "0", "fstab": "/etc/fstab", "fstype": "xfs", "mount_info": { "fstype": "xfs", "path": "/opt/test1", "src": "UUID=73aed7df-5753-4b99-a608-d0d02019ecf4", "state": "absent" }, "name": "/opt/test1", "opts": "defaults", "passno": "0", "src": "UUID=73aed7df-5753-4b99-a608-d0d02019ecf4" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:179 Saturday 28 March 2026 19:30:41 -0400 (0:00:00.442) 0:08:16.307 ******** ok: [managed-node12] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Set up new/current mounts] *********** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:184 Saturday 28 March 2026 19:30:42 -0400 (0:00:00.843) 0:08:17.150 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195 Saturday 28 March 2026 19:30:42 -0400 (0:00:00.058) 0:08:17.209 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:207 Saturday 28 March 2026 19:30:42 -0400 (0:00:00.056) 0:08:17.265 
******** ok: [managed-node12] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:215 Saturday 28 March 2026 19:30:43 -0400 (0:00:00.930) 0:08:18.196 ******** ok: [managed-node12] => { "changed": false, "stat": { "atime": 1774737762.6954868, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1774342704.475, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 4194435, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1774342323.629, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "1224473831", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:220 Saturday 28 March 2026 19:30:43 -0400 (0:00:00.517) 0:08:18.714 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:242 Saturday 28 March 2026 19:30:43 -0400 (0:00:00.063) 0:08:18.777 ******** ok: [managed-node12] TASK [Verify role results - 7] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:187 Saturday 28 March 2026 19:30:44 -0400 (0:00:01.100) 0:08:19.877 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node12 TASK [Print out pool information] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2 Saturday 28 March 2026 19:30:44 -0400 (0:00:00.052) 0:08:19.930 ******** ok: [managed-node12] => { "_storage_pools_list": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": true, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "stratis", "volumes": [ { "_device": "/dev/stratis/foo/test1", "_mount_id": "UUID=73aed7df-5753-4b99-a608-d0d02019ecf4", "_raw_device": "/dev/stratis/foo/test1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, 
"encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } ] } ] } TASK [Print out volume information] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7 Saturday 28 March 2026 19:30:44 -0400 (0:00:00.073) 0:08:20.003 ******** skipping: [managed-node12] => { "false_condition": "_storage_volumes_list | length > 0" } TASK [Collect info about the volumes.] ***************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15 Saturday 28 March 2026 19:30:44 -0400 (0:00:00.051) 0:08:20.055 ******** ok: [managed-node12] => { "changed": false, "info": { "/dev/loop0": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/loop0", "size": "0B", "type": "loop", "uuid": "" }, "/dev/sda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdb": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda1", "size": "1M", "type": "partition", "uuid": "" }, "/dev/xvda2": { "fstype": "xfs", "label": "", "mountpoint": "/", "name": "/dev/xvda2", "size": "250G", "type": "partition", "uuid": "94fc577e-f1df-44bb-8e86-63b9a68f8f7f" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20 Saturday 28 March 2026 19:30:45 -0400 (0:00:00.518) 0:08:20.574 ******** ok: [managed-node12] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.002908", "end": "2026-03-28 
19:30:45.861906", "rc": 0, "start": "2026-03-28 19:30:45.858998" } STDOUT: # system_role:storage # # /etc/fstab # Created by anaconda on Tue Mar 24 08:52:03 2026 # # Accessible filesystems, by reference, are maintained under '/dev/disk/'. # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info. # # After editing this file, run 'systemctl daemon-reload' to update systemd # units generated from this file. # UUID=94fc577e-f1df-44bb-8e86-63b9a68f8f7f / xfs defaults 0 0 ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25 Saturday 28 March 2026 19:30:45 -0400 (0:00:00.449) 0:08:21.023 ******** ok: [managed-node12] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.002923", "end": "2026-03-28 19:30:46.252914", "failed_when_result": false, "rc": 0, "start": "2026-03-28 19:30:46.249991" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34 Saturday 28 March 2026 19:30:46 -0400 (0:00:00.424) 0:08:21.448 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node12 => (item={'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'encryption': True, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'absent', 'type': 'stratis', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'part_type': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': 
None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=73aed7df-5753-4b99-a608-d0d02019ecf4'}]}) TASK [Set _storage_pool_tests] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5 Saturday 28 March 2026 19:30:46 -0400 (0:00:00.166) 0:08:21.615 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [Get VG shared value status] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18 Saturday 28 March 2026 19:30:46 -0400 (0:00:00.075) 0:08:21.691 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'", "skip_reason": "Conditional result was False" } TASK [Verify that VG shared value checks out] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24 Saturday 28 March 2026 19:30:46 -0400 (0:00:00.065) 0:08:21.757 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'", "skip_reason": "Conditional result was False" } TASK [Verify pool subset] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34 Saturday 28 March 2026 19:30:46 -0400 (0:00:00.074) 0:08:21.831 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node12 => (item=members) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node12 => (item=volumes) TASK [Set test variables] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2 Saturday 28 March 2026 19:30:46 -0400 (0:00:00.153) 0:08:21.984 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Get the canonical device path for each member device] ******************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8 Saturday 28 March 2026 19:30:46 -0400 (0:00:00.043) 0:08:22.028 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Set pvs lvm length] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17 Saturday 28 March 2026 19:30:46 -0400 (0:00:00.035) 0:08:22.063 ******** skipping: [managed-node12] => { "changed": false, "false_condition": 
"storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Set pool pvs] ************************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22 Saturday 28 March 2026 19:30:47 -0400 (0:00:00.044) 0:08:22.108 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Verify PV count] ********************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27 Saturday 28 March 2026 19:30:47 -0400 (0:00:00.048) 0:08:22.157 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Set expected pv type] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36 Saturday 28 March 2026 19:30:47 -0400 (0:00:00.053) 0:08:22.210 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Set expected pv type - 2] ************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41 Saturday 28 March 2026 19:30:47 -0400 (0:00:00.042) 0:08:22.253 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm' and not storage_test_pool.encryption", "skip_reason": "Conditional result was False" } TASK [Set expected pv type - 3] ************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46 Saturday 28 March 2026 19:30:47 -0400 (0:00:00.059) 0:08:22.313 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:55 Saturday 28 March 2026 19:30:47 -0400 (0:00:00.038) 0:08:22.351 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Check that blivet supports PV grow to fill] ****************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:68 Saturday 28 March 2026 19:30:47 -0400 (0:00:00.028) 0:08:22.380 ******** ok: [managed-node12] => { "changed": false, "failed_when_result": false, "rc": 0 } STDERR: OpenSSH_9.9p1, OpenSSL 3.5.5 27 Jan 2026 debug1: Reading configuration data /root/.ssh/config debug1: Reading configuration data /etc/ssh/ssh_config debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug2: checking match for 'final all' host 10.31.11.27 originally 10.31.11.27 debug2: match not found debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config debug1: configuration requests final Match pass debug2: resolve_canonicalize: hostname 10.31.11.27 is address debug1: re-parsing 
configuration debug1: Reading configuration data /root/.ssh/config debug1: Reading configuration data /etc/ssh/ssh_config debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug2: checking match for 'final all' host 10.31.11.27 originally 10.31.11.27 debug2: match found debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config debug1: auto-mux: Trying existing master at '/root/.ansible/cp/e1141b92e1' debug2: fd 3 setting O_NONBLOCK debug2: mux_client_hello_exchange: master version 4 debug1: mux_client_request_session: master session id: 2 debug2: Received exit status from master 0 Shared connection to 10.31.11.27 closed. TASK [Verify that PVs fill the whole devices when they should] ***************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:78 Saturday 28 March 2026 19:30:47 -0400 (0:00:00.453) 0:08:22.833 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Check MD RAID] *********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:88 Saturday 28 March 2026 19:30:47 -0400 (0:00:00.038) 0:08:22.872 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node12 TASK [Get information about RAID] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8 Saturday 28 March 2026 19:30:47 -0400 (0:00:00.100) 0:08:22.972 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14 Saturday 28 March 2026 19:30:47 -0400 (0:00:00.045) 0:08:23.018 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19 Saturday 28 March 2026 19:30:47 -0400 (0:00:00.045) 0:08:23.063 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24 Saturday 28 March 2026 19:30:48 -0400 (0:00:00.049) 0:08:23.113 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set md chunk size regex] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29 Saturday 28 March 2026 19:30:48 -0400 (0:00:00.069) 0:08:23.182 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": 
"Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37 Saturday 28 March 2026 19:30:48 -0400 (0:00:00.052) 0:08:23.235 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46 Saturday 28 March 2026 19:30:48 -0400 (0:00:00.079) 0:08:23.314 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55 Saturday 28 March 2026 19:30:48 -0400 (0:00:00.073) 0:08:23.388 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64 Saturday 28 March 2026 19:30:48 -0400 (0:00:00.069) 0:08:23.457 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74 Saturday 28 March 2026 19:30:48 -0400 (0:00:00.069) 0:08:23.527 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83 Saturday 28 March 2026 19:30:48 -0400 (0:00:00.052) 0:08:23.579 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_chunk_size_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:91 Saturday 28 March 2026 19:30:48 -0400 (0:00:00.072) 0:08:23.651 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node12 TASK [Validate pool member LVM RAID settings] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2 Saturday 28 March 2026 19:30:48 -0400 (0:00:00.206) 0:08:23.858 ******** skipping: [managed-node12] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': 
None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'part_type': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=73aed7df-5753-4b99-a608-d0d02019ecf4'}) => { "ansible_loop_var": "storage_test_lvmraid_volume", "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False", "storage_test_lvmraid_volume": { "_device": "/dev/stratis/foo/test1", "_mount_id": "UUID=73aed7df-5753-4b99-a608-d0d02019ecf4", "_raw_device": "/dev/stratis/foo/test1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } } skipping: [managed-node12] => { "changed": false } MSG: All items skipped TASK [Check Thin Pools] ******************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:94 Saturday 28 March 2026 19:30:48 -0400 (0:00:00.059) 0:08:23.917 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node12 TASK [Validate pool member thinpool settings] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2 Saturday 28 March 2026 19:30:48 -0400 (0:00:00.092) 0:08:24.009 ******** skipping: [managed-node12] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 
'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'part_type': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=73aed7df-5753-4b99-a608-d0d02019ecf4'}) => { "ansible_loop_var": "storage_test_thin_volume", "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False", "storage_test_thin_volume": { "_device": "/dev/stratis/foo/test1", "_mount_id": "UUID=73aed7df-5753-4b99-a608-d0d02019ecf4", "_raw_device": "/dev/stratis/foo/test1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } } skipping: [managed-node12] => { "changed": false } MSG: All items skipped TASK [Check member encryption] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:97 Saturday 28 March 2026 19:30:48 -0400 (0:00:00.062) 0:08:24.072 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node12 TASK [Set test variables] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5 Saturday 28 March 2026 19:30:49 -0400 (0:00:00.167) 0:08:24.240 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10 Saturday 28 March 2026 19:30:49 -0400 (0:00:00.077) 0:08:24.317 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Validate pool member crypttab entries] *********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17 Saturday 28 March 2026 
19:30:49 -0400 (0:00:00.043) 0:08:24.361 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24 Saturday 28 March 2026 19:30:49 -0400 (0:00:00.036) 0:08:24.398 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:100 Saturday 28 March 2026 19:30:49 -0400 (0:00:00.038) 0:08:24.436 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node12 TASK [Validate pool member VDO settings] *************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2 Saturday 28 March 2026 19:30:49 -0400 (0:00:00.104) 0:08:24.540 ******** skipping: [managed-node12] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'part_type': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=73aed7df-5753-4b99-a608-d0d02019ecf4'}) => { "ansible_loop_var": "storage_test_vdo_volume", "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False", "storage_test_vdo_volume": { "_device": "/dev/stratis/foo/test1", "_mount_id": "UUID=73aed7df-5753-4b99-a608-d0d02019ecf4", "_raw_device": "/dev/stratis/foo/test1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": 
null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } } skipping: [managed-node12] => { "changed": false } MSG: All items skipped TASK [Check Stratis] *********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:103 Saturday 28 March 2026 19:30:49 -0400 (0:00:00.094) 0:08:24.635 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node12 TASK [Get stratis pool information] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6 Saturday 28 March 2026 19:30:49 -0400 (0:00:00.154) 0:08:24.790 ******** ok: [managed-node12] => { "changed": false, "rc": 0 } STDOUT: null STDERR: OpenSSH_9.9p1, OpenSSL 3.5.5 27 Jan 2026 debug1: Reading configuration data /root/.ssh/config debug1: Reading configuration data /etc/ssh/ssh_config debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug2: checking match for 'final all' host 10.31.11.27 originally 10.31.11.27 debug2: match not found debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config debug1: configuration requests final Match pass debug2: resolve_canonicalize: hostname 10.31.11.27 is address debug1: re-parsing configuration debug1: Reading configuration data /root/.ssh/config debug1: Reading configuration data /etc/ssh/ssh_config debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug2: checking match for 'final all' host 10.31.11.27 originally 10.31.11.27 debug2: match found debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config debug1: auto-mux: Trying existing master at '/root/.ansible/cp/e1141b92e1' debug2: fd 3 setting O_NONBLOCK debug2: mux_client_hello_exchange: master version 4 debug1: mux_client_request_session: master session id: 2 debug2: Received exit status from master 0 Shared connection to 10.31.11.27 closed. 
TASK [Print script output] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15 Saturday 28 March 2026 19:30:50 -0400 (0:00:00.465) 0:08:25.255 ******** ok: [managed-node12] => {} MSG: null TASK [Get information about Stratis] ******************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:19 Saturday 28 March 2026 19:30:50 -0400 (0:00:00.032) 0:08:25.287 ******** ok: [managed-node12] => { "ansible_facts": { "_stratis_pool_info": "" }, "changed": false } TASK [Verify that the pools was created] *************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:23 Saturday 28 March 2026 19:30:50 -0400 (0:00:00.046) 0:08:25.334 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.state == 'present'", "skip_reason": "Conditional result was False" } TASK [Verify that encryption is correctly set] ********************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:30 Saturday 28 March 2026 19:30:50 -0400 (0:00:00.053) 0:08:25.388 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.state == 'present'", "skip_reason": "Conditional result was False" } TASK [Verify that Clevis/Tang encryption is correctly set] ********************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:39 Saturday 28 March 2026 19:30:50 -0400 (0:00:00.033) 0:08:25.421 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.state == 'present'", "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:49 Saturday 28 March 2026 19:30:50 -0400 (0:00:00.033) 0:08:25.454 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_stratis_report": null }, "changed": false } TASK [Clean up test variables] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:106 Saturday 28 March 2026 19:30:50 -0400 (0:00:00.058) 0:08:25.513 ******** ok: [managed-node12] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Verify the volumes] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3 Saturday 28 March 2026 19:30:50 -0400 (0:00:00.076) 0:08:25.590 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node12 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 
'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'part_type': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '/dev/stratis/foo/test1', '_raw_device': '/dev/stratis/foo/test1', '_mount_id': 'UUID=73aed7df-5753-4b99-a608-d0d02019ecf4'}) TASK [Set storage volume test variables] *************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2 Saturday 28 March 2026 19:30:50 -0400 (0:00:00.157) 0:08:25.747 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_volume_present": false, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [Run test verify for storage_test_volume_subset] ************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19 Saturday 28 March 2026 19:30:50 -0400 (0:00:00.067) 0:08:25.815 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node12 => (item=mount) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node12 => (item=fstab) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node12 => (item=fs) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node12 => (item=device) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for managed-node12 => (item=encryption) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node12 => (item=md) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node12 => (item=size) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node12 => (item=cache) TASK [Get expected mount device based on device type] ************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7 Saturday 28 March 2026 19:30:51 -0400 (0:00:00.548) 0:08:26.363 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_device_path": "/dev/stratis/foo/test1" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11 Saturday 28 March 2026 19:30:51 -0400 (0:00:00.066) 0:08:26.430 ******** ok: [managed-node12] => { "ansible_facts": { 
"storage_test_mount_expected_mount_point": "/opt/test1", "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Get information about the mountpoint directory] ************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19 Saturday 28 March 2026 19:30:51 -0400 (0:00:00.089) 0:08:26.519 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present | bool", "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by device] ******************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:32 Saturday 28 March 2026 19:30:51 -0400 (0:00:00.038) 0:08:26.557 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Verify mount directory user] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:40 Saturday 28 March 2026 19:30:51 -0400 (0:00:00.056) 0:08:26.614 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Verify mount directory group] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:51 Saturday 28 March 2026 19:30:51 -0400 (0:00:00.039) 0:08:26.654 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Verify mount directory permissions] ************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:62 Saturday 28 March 2026 19:30:51 -0400 (0:00:00.043) 0:08:26.697 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Get path of test volume device] ****************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:76 Saturday 28 March 2026 19:30:51 -0400 (0:00:00.058) 0:08:26.756 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:82 Saturday 28 March 2026 19:30:51 -0400 (0:00:00.062) 0:08:26.819 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:88 Saturday 28 March 2026 19:30:51 -0400 (0:00:00.067) 0:08:26.886 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": 
"Conditional result was False" } TASK [Unset facts] ************************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:98 Saturday 28 March 2026 19:30:51 -0400 (0:00:00.056) 0:08:26.943 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_found_mount_stat": null, "storage_test_mount_expected_mount_point": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2 Saturday 28 March 2026 19:30:51 -0400 (0:00:00.057) 0:08:27.000 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "0", "storage_test_fstab_expected_mount_options_matches": "0", "storage_test_fstab_expected_mount_point_matches": "0", "storage_test_fstab_id_matches": [], "storage_test_fstab_mount_options_matches": [], "storage_test_fstab_mount_point_matches": [] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17 Saturday 28 March 2026 19:30:51 -0400 (0:00:00.083) 0:08:27.083 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present | bool", "skip_reason": "Conditional result was False" } TASK [Verify the fstab mount point] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24 Saturday 28 March 2026 19:30:52 -0400 (0:00:00.044) 0:08:27.128 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33 Saturday 28 March 2026 19:30:52 -0400 (0:00:00.050) 0:08:27.179 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "__storage_verify_mount_options | d(false)", "skip_reason": "Conditional result was False" } TASK [Verify fingerprint] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45 Saturday 28 March 2026 19:30:52 -0400 (0:00:00.039) 0:08:27.218 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Clean up variables] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52 Saturday 28 March 2026 19:30:52 -0400 (0:00:00.045) 0:08:27.264 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6 Saturday 28 March 2026 19:30:52 -0400 (0:00:00.036) 0:08:27.301 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type != \"stratis\"", "skip_reason": "Conditional result was False" } TASK [Verify fs label] ********************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14 Saturday 28 March 2026 19:30:52 -0400 (0:00:00.030) 0:08:27.331 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type != \"stratis\"", "skip_reason": "Conditional result was False" } TASK [See whether the device node is present] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3 Saturday 28 March 2026 19:30:52 -0400 (0:00:00.028) 0:08:27.359 ******** ok: [managed-node12] => { "changed": false, "stat": { "exists": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9 Saturday 28 March 2026 19:30:52 -0400 (0:00:00.462) 0:08:27.821 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present or storage_test_volume.type == 'disk'", "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the device node - 2] ********************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16 Saturday 28 March 2026 19:30:52 -0400 (0:00:00.056) 0:08:27.878 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about this volume] ********************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23 Saturday 28 March 2026 19:30:52 -0400 (0:00:00.087) 0:08:27.966 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Process volume type (set initial value) (1/2)] *************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29 Saturday 28 March 2026 19:30:52 -0400 (0:00:00.056) 0:08:28.023 ******** ok: [managed-node12] => { "ansible_facts": { "st_volume_type": "stratis" }, "changed": false } TASK [Process volume type (get RAID value) (2/2)] ****************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33 Saturday 28 March 2026 19:30:52 -0400 (0:00:00.054) 0:08:28.077 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == \"raid\"", "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38 Saturday 28 March 2026 19:30:53 -0400 (0:00:00.040) 0:08:28.118 ******** skipping: [managed-node12] => { "changed": false, "false_condition": 
"_storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Stat the LUKS device, if encrypted] ************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3 Saturday 28 March 2026 19:30:53 -0400 (0:00:00.051) 0:08:28.169 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10 Saturday 28 March 2026 19:30:53 -0400 (0:00:00.038) 0:08:28.208 ******** ok: [managed-node12] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: cryptsetup TASK [Collect LUKS info for this volume] *************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16 Saturday 28 March 2026 19:30:53 -0400 (0:00:00.879) 0:08:29.088 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.encryption and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22 Saturday 28 March 2026 19:30:54 -0400 (0:00:00.020) 0:08:29.108 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29 Saturday 28 March 2026 19:30:54 -0400 (0:00:00.020) 0:08:29.128 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40 Saturday 28 March 2026 19:30:54 -0400 (0:00:00.019) 0:08:29.148 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46 Saturday 28 March 2026 19:30:54 -0400 (0:00:00.019) 0:08:29.167 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51 Saturday 28 March 2026 19:30:54 -0400 (0:00:00.018) 0:08:29.186 ******** skipping: [managed-node12] => { "changed": false, "false_condition": 
"_storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:64 Saturday 28 March 2026 19:30:54 -0400 (0:00:00.018) 0:08:29.205 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:77 Saturday 28 March 2026 19:30:54 -0400 (0:00:00.019) 0:08:29.224 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Set test variables] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:90 Saturday 28 March 2026 19:30:54 -0400 (0:00:00.019) 0:08:29.243 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:96 Saturday 28 March 2026 19:30:54 -0400 (0:00:00.030) 0:08:29.274 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:103 Saturday 28 March 2026 19:30:54 -0400 (0:00:00.027) 0:08:29.301 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:111 Saturday 28 March 2026 19:30:54 -0400 (0:00:00.020) 0:08:29.321 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:119 Saturday 28 March 2026 19:30:54 -0400 (0:00:00.028) 0:08:29.350 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:127 Saturday 28 March 2026 19:30:54 -0400 (0:00:00.030) 0:08:29.381 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_crypttab_entries": null, 
"_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [Get information about RAID] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8 Saturday 28 March 2026 19:30:54 -0400 (0:00:00.040) 0:08:29.421 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14 Saturday 28 March 2026 19:30:54 -0400 (0:00:00.033) 0:08:29.455 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19 Saturday 28 March 2026 19:30:54 -0400 (0:00:00.036) 0:08:29.491 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24 Saturday 28 March 2026 19:30:54 -0400 (0:00:00.033) 0:08:29.525 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set chunk size regex] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29 Saturday 28 March 2026 19:30:54 -0400 (0:00:00.033) 0:08:29.559 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37 Saturday 28 March 2026 19:30:54 -0400 (0:00:00.034) 0:08:29.593 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46 Saturday 28 March 2026 19:30:54 -0400 (0:00:00.035) 0:08:29.629 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54 Saturday 28 March 2026 19:30:54 -0400 (0:00:00.031) 0:08:29.660 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was 
False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62 Saturday 28 March 2026 19:30:54 -0400 (0:00:00.032) 0:08:29.693 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70 Saturday 28 March 2026 19:30:54 -0400 (0:00:00.031) 0:08:29.724 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Parse the actual size of the volume] ************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3 Saturday 28 March 2026 19:30:54 -0400 (0:00:00.037) 0:08:29.762 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present | bool", "skip_reason": "Conditional result was False" } TASK [Parse the requested size of the volume] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11 Saturday 28 March 2026 19:30:54 -0400 (0:00:00.033) 0:08:29.795 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present | bool", "skip_reason": "Conditional result was False" } TASK [Establish base value for expected size] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20 Saturday 28 March 2026 19:30:54 -0400 (0:00:00.033) 0:08:29.828 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present | bool", "skip_reason": "Conditional result was False" } TASK [Show expected size] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28 Saturday 28 March 2026 19:30:54 -0400 (0:00:00.031) 0:08:29.860 ******** ok: [managed-node12] => { "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined" } TASK [Get the size of parent/pool device] ************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32 Saturday 28 March 2026 19:30:54 -0400 (0:00:00.036) 0:08:29.896 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present | bool", "skip_reason": "Conditional result was False" } TASK [Show test pool] ********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46 Saturday 28 March 2026 19:30:54 -0400 (0:00:00.031) 0:08:29.928 ******** skipping: [managed-node12] => { "false_condition": "_storage_test_volume_present | bool" } TASK [Show test blockinfo] ***************************************************** task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50 Saturday 28 March 2026 19:30:54 -0400 (0:00:00.032) 0:08:29.961 ******** skipping: [managed-node12] => { "false_condition": "_storage_test_volume_present | bool" } TASK [Show test pool size] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54 Saturday 28 March 2026 19:30:54 -0400 (0:00:00.031) 0:08:29.992 ******** skipping: [managed-node12] => { "false_condition": "_storage_test_volume_present | bool" } TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58 Saturday 28 March 2026 19:30:54 -0400 (0:00:00.030) 0:08:30.023 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present | bool", "skip_reason": "Conditional result was False" } TASK [Default thin pool reserved space values] ********************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:68 Saturday 28 March 2026 19:30:54 -0400 (0:00:00.032) 0:08:30.055 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Default minimal thin pool reserved space size] *************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:72 Saturday 28 March 2026 19:30:54 -0400 (0:00:00.035) 0:08:30.091 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Default maximal thin pool reserved space size] *************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:77 Saturday 28 March 2026 19:30:55 -0400 (0:00:00.034) 0:08:30.126 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Calculate maximum usable space in thin pool] ***************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:83 Saturday 28 March 2026 19:30:55 -0400 (0:00:00.050) 0:08:30.176 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Apply upper size limit to max usable thin pool space] ******************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:87 Saturday 28 March 2026 19:30:55 -0400 (0:00:00.054) 0:08:30.230 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Apply lower size limit to max usable thin pool space] ******************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:92 Saturday 28 March 2026 19:30:55 -0400 (0:00:00.067) 0:08:30.299 ******** skipping: [managed-node12] => { "changed": false, 
"false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Convert maximum usable thin pool space from int to Size] ***************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:97 Saturday 28 March 2026 19:30:55 -0400 (0:00:00.072) 0:08:30.372 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Show max thin pool size] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:102 Saturday 28 March 2026 19:30:55 -0400 (0:00:00.084) 0:08:30.456 ******** skipping: [managed-node12] => { "false_condition": "storage_test_volume.thin | bool" } TASK [Show volume thin pool size] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:106 Saturday 28 March 2026 19:30:55 -0400 (0:00:00.062) 0:08:30.519 ******** skipping: [managed-node12] => { "false_condition": "storage_test_volume.thin | bool" } TASK [Show test volume size] *************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:110 Saturday 28 March 2026 19:30:55 -0400 (0:00:00.075) 0:08:30.594 ******** skipping: [managed-node12] => { "false_condition": "storage_test_volume.thin | bool" } TASK [Establish base value for expected thin pool size] ************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:114 Saturday 28 March 2026 19:30:55 -0400 (0:00:00.076) 0:08:30.670 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Calculate the expected size based on pool size and percentage value - 2] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:121 Saturday 28 March 2026 19:30:55 -0400 (0:00:00.094) 0:08:30.765 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Establish base value for expected thin pool volume size] ***************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:128 Saturday 28 March 2026 19:30:55 -0400 (0:00:00.072) 0:08:30.837 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Calculate the expected thin pool volume size based on percentage value] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:132 Saturday 28 March 2026 19:30:55 -0400 (0:00:00.075) 0:08:30.912 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Replace expected volume size with calculated value] ********************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:138 
Saturday 28 March 2026 19:30:55 -0400 (0:00:00.051) 0:08:30.964 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Show actual size] ******************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:144 Saturday 28 March 2026 19:30:55 -0400 (0:00:00.084) 0:08:31.048 ******** ok: [managed-node12] => { "storage_test_actual_size": { "changed": false, "false_condition": "_storage_test_volume_present | bool", "skip_reason": "Conditional result was False", "skipped": true } } TASK [Show expected size - 2] ************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:148 Saturday 28 March 2026 19:30:56 -0400 (0:00:00.076) 0:08:31.124 ******** ok: [managed-node12] => { "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined" } TASK [Assert expected size is actual size] ************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:152 Saturday 28 March 2026 19:30:56 -0400 (0:00:00.061) 0:08:31.185 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present | bool", "skip_reason": "Conditional result was False" } TASK [Get information about the LV] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5 Saturday 28 March 2026 19:30:56 -0400 (0:00:00.047) 0:08:31.233 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13 Saturday 28 March 2026 19:30:56 -0400 (0:00:00.039) 0:08:31.273 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Check segment type] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17 Saturday 28 March 2026 19:30:56 -0400 (0:00:00.037) 0:08:31.310 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Set LV cache size] ******************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24 Saturday 28 March 2026 19:30:56 -0400 (0:00:00.042) 0:08:31.352 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Parse the requested cache size] ****************************************** task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31 Saturday 28 March 2026 19:30:56 -0400 (0:00:00.040) 0:08:31.393 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Set expected cache size] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37 Saturday 28 March 2026 19:30:56 -0400 (0:00:00.052) 0:08:31.446 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42 Saturday 28 March 2026 19:30:56 -0400 (0:00:00.037) 0:08:31.483 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25 Saturday 28 March 2026 19:30:56 -0400 (0:00:00.033) 0:08:31.517 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:43 Saturday 28 March 2026 19:30:56 -0400 (0:00:00.037) 0:08:31.554 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Clean up variable namespace] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:52 Saturday 28 March 2026 19:30:56 -0400 (0:00:00.031) 0:08:31.585 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Create one Stratis pool on one disk] ************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:193 Saturday 28 March 2026 19:30:56 -0400 (0:00:00.041) 0:08:31.627 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml for managed-node12 TASK [Clear facts] ************************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml:10 Saturday 28 March 2026 19:30:56 -0400 (0:00:00.043) 0:08:31.670 ******** META: facts cleared TASK [Run the role] ************************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml:24 Saturday 28 March 2026 19:30:56 -0400 (0:00:00.010) 0:08:31.681 ******** skipping: [managed-node12] => { "changed": false, "false_condition": 
"__sr_failed_when is defined", "skip_reason": "Conditional result was False" } TASK [Run the role normally] *************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml:34 Saturday 28 March 2026 19:30:56 -0400 (0:00:00.030) 0:08:31.711 ******** included: fedora.linux_system_roles.storage for managed-node12 TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Saturday 28 March 2026 19:30:56 -0400 (0:00:00.049) 0:08:31.760 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node12 TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Saturday 28 March 2026 19:30:56 -0400 (0:00:00.041) 0:08:31.802 ******** ok: [managed-node12] TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Saturday 28 March 2026 19:30:57 -0400 (0:00:00.975) 0:08:32.777 ******** skipping: [managed-node12] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node12] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node12] => (item=CentOS_10.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-fs", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "xfsprogs", "stratisd", "stratis-cli", "{{ 'libblockdev-s390' if ansible_facts['architecture'] == 's390x' else 'libblockdev' }}", "vdo" ] }, "ansible_included_var_files": [ "/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node12] => (item=CentOS_10.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-fs", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "xfsprogs", "stratisd", "stratis-cli", "{{ 'libblockdev-s390' if ansible_facts['architecture'] == 's390x' else 'libblockdev' }}", "vdo" ] }, "ansible_included_var_files": [ "/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Saturday 28 March 2026 19:30:57 -0400 (0:00:00.074) 0:08:32.852 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "not __storage_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Saturday 28 March 2026 19:30:57 -0400 (0:00:00.033) 0:08:32.886 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "not __storage_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Saturday 28 March 2026 19:30:57 -0400 (0:00:00.031) 0:08:32.917 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Saturday 28 March 2026 19:30:57 -0400 (0:00:00.021) 0:08:32.938 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Saturday 28 March 2026 19:30:57 -0400 (0:00:00.028) 0:08:32.967 ******** redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node12 TASK [fedora.linux_system_roles.storage : Add repo key] ************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Saturday 28 March 2026 19:30:57 -0400 (0:00:00.089) 0:08:33.056 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_blivet_custom_repo.key is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Add blivet repo] ********************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:15 Saturday 28 March 2026 19:30:57 -0400 (0:00:00.040) 0:08:33.097 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_blivet_custom_repo.key is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:20 Saturday 28 March 2026 19:30:58 -0400 (0:00:00.030) 0:08:33.128 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:27 Saturday 28 March 2026 19:30:58 -0400 (0:00:00.030) 0:08:33.158 ******** ok: [managed-node12] => { "storage_pools | d([])": [ { "disks": "sda", "name": "foo", "type": "stratis" } ] } TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:32 Saturday 28 March 
2026 19:30:58 -0400 (0:00:00.039) 0:08:33.198 ******** ok: [managed-node12] => { "storage_volumes | d([])": [] } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37 Saturday 28 March 2026 19:30:58 -0400 (0:00:00.038) 0:08:33.236 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:50 Saturday 28 March 2026 19:30:58 -0400 (0:00:00.031) 0:08:33.267 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:56 Saturday 28 March 2026 19:30:58 -0400 (0:00:00.035) 0:08:33.302 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:76 Saturday 28 March 2026 19:30:58 -0400 (0:00:00.050) 0:08:33.353 ******** ok: [managed-node12] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "apt-daily.service": { "name": "apt-daily.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": 
"chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", 
"status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fcoe.service": { "name": "fcoe.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "iscsi-shutdown.service": { "name": "iscsi-shutdown.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsi.service": { "name": "iscsi.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsid.service": { "name": "iscsid.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": 
"kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-activation-early.service": { "name": "lvm2-activation-early.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_multipath.service": { "name": "modprobe@dm_multipath.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": 
"nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "raid-check.service": { "name": "raid-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "rbdmap.service": { "name": "rbdmap.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, 
"selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-unix-local@.service": { "name": "sshd-unix-local@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd-vsock@.service": { "name": "sshd-vsock@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "stratis-fstab-setup-with-network@.service": { "name": "stratis-fstab-setup-with-network@.service", "source": "systemd", "state": "unknown", "status": "static" }, "stratis-fstab-setup@.service": { "name": "stratis-fstab-setup@.service", "source": "systemd", "state": "unknown", "status": "static" }, "stratisd-min-postinitrd.service": { "name": "stratisd-min-postinitrd.service", "source": "systemd", "state": "inactive", "status": "static" }, "stratisd.service": { "name": "stratisd.service", "source": "systemd", "state": "running", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": 
"system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": 
"systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": 
"systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", 
"status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-userdbd.service": { "name": "systemd-userdbd.service", "source": "systemd", "state": "running", "status": "indirect" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:82 Saturday 28 March 2026 19:31:00 -0400 (0:00:02.173) 0:08:35.526 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:88 
Saturday 28 March 2026 19:31:00 -0400 (0:00:00.039) 0:08:35.566 ******** changed: [managed-node12] => { "actions": [ { "action": "create format", "device": "/dev/sda", "fs_type": "stratis" }, { "action": "create device", "device": "/dev/stratis/foo", "fs_type": null } ], "changed": true, "crypts": [], "leaves": [ "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/xvda2", "/dev/stratis/foo" ], "mounts": [], "packages": [ "stratisd", "xfsprogs", "stratis-cli" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "stratis", "volumes": [] } ], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:103 Saturday 28 March 2026 19:31:04 -0400 (0:00:03.962) 0:08:39.529 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_udevadm_trigger | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ****** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:110 Saturday 28 March 2026 19:31:04 -0400 (0:00:00.021) 0:08:39.550 ******** ok: [managed-node12] => { "changed": false, "stat": { "atime": 1774740645.8607492, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "6aeccfbb3223a634b983c3c21792c1ba90809bb8", "ctime": 1774740641.1286864, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 230686985, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1774740641.1286864, "nlink": 1, "path": "/etc/fstab", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1408, "uid": 0, "version": "1679064280", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:115 Saturday 28 March 2026 19:31:04 -0400 (0:00:00.414) 0:08:39.964 ******** ok: [managed-node12] => { "backup": "", "changed": false } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:133 Saturday 28 March 2026 19:31:05 -0400 (0:00:00.450) 0:08:40.415 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:139 Saturday 28 March 2026 19:31:05 -0400 (0:00:00.066) 0:08:40.482 ******** ok: [managed-node12] => { "blivet_output": { "actions": [ { "action": "create format", "device": "/dev/sda", "fs_type": "stratis" }, { "action": "create device", "device": "/dev/stratis/foo", "fs_type": null } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/xvda2", "/dev/stratis/foo" ], "mounts": [], "packages": [ "stratisd", "xfsprogs", "stratis-cli" ], "pools": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "stratis", "volumes": [] } ], "volumes": [] } } TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148 Saturday 28 March 2026 19:31:05 -0400 (0:00:00.069) 0:08:40.551 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "stratis", "volumes": [] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:152 Saturday 28 March 2026 19:31:05 -0400 (0:00:00.056) 0:08:40.608 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] ************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:168 Saturday 28 March 2026 19:31:05 -0400 (0:00:00.051) 0:08:40.659 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:179 Saturday 28 March 2026 19:31:05 -0400 (0:00:00.032) 0:08:40.692 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "blivet_output['mounts'] | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set up new/current mounts] *********** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:184 Saturday 28 March 2026 19:31:05 -0400 (0:00:00.052) 
0:08:40.744 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195 Saturday 28 March 2026 19:31:05 -0400 (0:00:00.130) 0:08:40.875 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:207 Saturday 28 March 2026 19:31:05 -0400 (0:00:00.040) 0:08:40.916 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "blivet_output['mounts'] | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:215 Saturday 28 March 2026 19:31:05 -0400 (0:00:00.068) 0:08:40.985 ******** ok: [managed-node12] => { "changed": false, "stat": { "atime": 1774737762.6954868, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1774342704.475, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 4194435, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1774342323.629, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "1224473831", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:220 Saturday 28 March 2026 19:31:06 -0400 (0:00:00.534) 0:08:41.520 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:242 Saturday 28 March 2026 19:31:06 -0400 (0:00:00.050) 0:08:41.570 ******** ok: [managed-node12] TASK [Verify role results - 8] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:201 Saturday 28 March 2026 19:31:07 -0400 (0:00:01.052) 0:08:42.622 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node12 TASK [Print out pool information] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2 Saturday 28 March 2026 19:31:07 -0400 (0:00:00.056) 0:08:42.678 ******** ok: [managed-node12] => { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": false, "encryption_cipher": null, 
"encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "stratis", "volumes": [] } ] } TASK [Print out volume information] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7 Saturday 28 March 2026 19:31:07 -0400 (0:00:00.046) 0:08:42.724 ******** skipping: [managed-node12] => { "false_condition": "_storage_volumes_list | length > 0" } TASK [Collect info about the volumes.] ***************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15 Saturday 28 March 2026 19:31:07 -0400 (0:00:00.058) 0:08:42.783 ******** ok: [managed-node12] => { "changed": false, "info": { "/dev/loop0": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/loop0", "size": "0B", "type": "loop", "uuid": "" }, "/dev/mapper/stratis-1-private-4348e4e826364b779eb680c1a13b593d-flex-mdv": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/mapper/stratis-1-private-4348e4e826364b779eb680c1a13b593d-flex-mdv", "size": "512M", "type": "stratis", "uuid": "" }, "/dev/mapper/stratis-1-private-4348e4e826364b779eb680c1a13b593d-flex-thindata": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/mapper/stratis-1-private-4348e4e826364b779eb680c1a13b593d-flex-thindata", "size": "9.2G", "type": "stratis", "uuid": "" }, "/dev/mapper/stratis-1-private-4348e4e826364b779eb680c1a13b593d-flex-thinmeta": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/mapper/stratis-1-private-4348e4e826364b779eb680c1a13b593d-flex-thinmeta", "size": "6M", "type": "stratis", "uuid": "" }, "/dev/mapper/stratis-1-private-4348e4e826364b779eb680c1a13b593d-physical-cache": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/mapper/stratis-1-private-4348e4e826364b779eb680c1a13b593d-physical-cache", "size": "9.7G", "type": "stratis", "uuid": "" }, "/dev/mapper/stratis-1-private-4348e4e826364b779eb680c1a13b593d-physical-originsub": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/mapper/stratis-1-private-4348e4e826364b779eb680c1a13b593d-physical-originsub", "size": "9.7G", "type": "stratis", "uuid": "" }, "/dev/mapper/stratis-1-private-4348e4e826364b779eb680c1a13b593d-thinpool-pool": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/mapper/stratis-1-private-4348e4e826364b779eb680c1a13b593d-thinpool-pool", "size": "9.2G", "type": "stratis", "uuid": "" }, "/dev/sda": { "fstype": "stratis", "label": "", "mountpoint": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "6ef5557e-5daa-4322-85cd-5a8e73a322f6" }, "/dev/sdb": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": 
"", "mountpoint": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda1", "size": "1M", "type": "partition", "uuid": "" }, "/dev/xvda2": { "fstype": "xfs", "label": "", "mountpoint": "/", "name": "/dev/xvda2", "size": "250G", "type": "partition", "uuid": "94fc577e-f1df-44bb-8e86-63b9a68f8f7f" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20 Saturday 28 March 2026 19:31:08 -0400 (0:00:00.544) 0:08:43.327 ******** ok: [managed-node12] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.002821", "end": "2026-03-28 19:31:08.581863", "rc": 0, "start": "2026-03-28 19:31:08.579042" } STDOUT: # system_role:storage # # /etc/fstab # Created by anaconda on Tue Mar 24 08:52:03 2026 # # Accessible filesystems, by reference, are maintained under '/dev/disk/'. # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info. # # After editing this file, run 'systemctl daemon-reload' to update systemd # units generated from this file. # UUID=94fc577e-f1df-44bb-8e86-63b9a68f8f7f / xfs defaults 0 0 ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25 Saturday 28 March 2026 19:31:08 -0400 (0:00:00.420) 0:08:43.748 ******** ok: [managed-node12] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.002799", "end": "2026-03-28 19:31:09.002902", "failed_when_result": false, "rc": 0, "start": "2026-03-28 19:31:09.000103" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34 Saturday 28 March 2026 19:31:09 -0400 (0:00:00.443) 0:08:44.192 ******** included: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node12 => (item={'disks': ['sda'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'present', 'type': 'stratis', 'volumes': []}) TASK [Set _storage_pool_tests] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5 Saturday 28 March 2026 19:31:09 -0400 (0:00:00.135) 0:08:44.327 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [Get VG shared value status] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18 Saturday 28 March 2026 19:31:09 -0400 (0:00:00.048) 0:08:44.376 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'", "skip_reason": "Conditional result was False" } TASK [Verify that VG shared value checks out] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24 Saturday 28 March 2026 19:31:09 -0400 (0:00:00.043) 0:08:44.420 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'", "skip_reason": "Conditional result was False" } TASK [Verify pool subset] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34 Saturday 28 March 2026 19:31:09 -0400 (0:00:00.033) 0:08:44.453 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node12 => (item=members) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node12 => (item=volumes) TASK [Set test variables] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2 Saturday 28 March 2026 19:31:09 -0400 (0:00:00.108) 0:08:44.561 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Get the canonical device path for each member device] ******************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8 Saturday 28 March 2026 19:31:09 -0400 (0:00:00.066) 0:08:44.628 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Set pvs lvm length] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17 Saturday 
28 March 2026 19:31:09 -0400 (0:00:00.029) 0:08:44.657 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Set pool pvs] ************************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22 Saturday 28 March 2026 19:31:09 -0400 (0:00:00.060) 0:08:44.718 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Verify PV count] ********************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27 Saturday 28 March 2026 19:31:09 -0400 (0:00:00.034) 0:08:44.752 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Set expected pv type] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36 Saturday 28 March 2026 19:31:09 -0400 (0:00:00.048) 0:08:44.801 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Set expected pv type - 2] ************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41 Saturday 28 March 2026 19:31:09 -0400 (0:00:00.035) 0:08:44.837 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm' and not storage_test_pool.encryption", "skip_reason": "Conditional result was False" } TASK [Set expected pv type - 3] ************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46 Saturday 28 March 2026 19:31:09 -0400 (0:00:00.052) 0:08:44.889 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:55 Saturday 28 March 2026 19:31:09 -0400 (0:00:00.063) 0:08:44.953 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Check that blivet supports PV grow to fill] ****************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:68 Saturday 28 March 2026 19:31:09 -0400 (0:00:00.031) 0:08:44.984 ******** ok: [managed-node12] => { "changed": false, "failed_when_result": false, "rc": 0 } STDERR: OpenSSH_9.9p1, OpenSSL 3.5.5 27 Jan 2026 debug1: Reading configuration data /root/.ssh/config debug1: Reading configuration data /etc/ssh/ssh_config debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug2: checking match for 'final all' host 10.31.11.27 originally 10.31.11.27 debug2: match not found debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config 
debug1: configuration requests final Match pass debug2: resolve_canonicalize: hostname 10.31.11.27 is address debug1: re-parsing configuration debug1: Reading configuration data /root/.ssh/config debug1: Reading configuration data /etc/ssh/ssh_config debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug2: checking match for 'final all' host 10.31.11.27 originally 10.31.11.27 debug2: match found debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config debug1: auto-mux: Trying existing master at '/root/.ansible/cp/e1141b92e1' debug2: fd 3 setting O_NONBLOCK debug2: mux_client_hello_exchange: master version 4 debug1: mux_client_request_session: master session id: 2 debug2: Received exit status from master 0 Shared connection to 10.31.11.27 closed. TASK [Verify that PVs fill the whole devices when they should] ***************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:78 Saturday 28 March 2026 19:31:10 -0400 (0:00:00.545) 0:08:45.529 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Check MD RAID] *********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:88 Saturday 28 March 2026 19:31:10 -0400 (0:00:00.064) 0:08:45.594 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node12 TASK [Get information about RAID] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8 Saturday 28 March 2026 19:31:10 -0400 (0:00:00.180) 0:08:45.776 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14 Saturday 28 March 2026 19:31:10 -0400 (0:00:00.074) 0:08:45.850 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19 Saturday 28 March 2026 19:31:10 -0400 (0:00:00.071) 0:08:45.922 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24 Saturday 28 March 2026 19:31:10 -0400 (0:00:00.065) 0:08:45.987 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set md chunk size regex] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29 Saturday 28 March 2026 19:31:10 -0400 (0:00:00.068) 0:08:46.055 ******** 
skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37 Saturday 28 March 2026 19:31:11 -0400 (0:00:00.058) 0:08:46.113 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46 Saturday 28 March 2026 19:31:11 -0400 (0:00:00.057) 0:08:46.171 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55 Saturday 28 March 2026 19:31:11 -0400 (0:00:00.059) 0:08:46.230 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64 Saturday 28 March 2026 19:31:11 -0400 (0:00:00.060) 0:08:46.291 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74 Saturday 28 March 2026 19:31:11 -0400 (0:00:00.072) 0:08:46.364 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83 Saturday 28 March 2026 19:31:11 -0400 (0:00:00.065) 0:08:46.430 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_chunk_size_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:91 Saturday 28 March 2026 19:31:11 -0400 (0:00:00.069) 0:08:46.499 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node12 TASK [Validate pool member LVM RAID settings] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2 Saturday 28 March 2026 19:31:11 -0400 (0:00:00.102) 0:08:46.602 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": 
"No items in the list" } TASK [Check Thin Pools] ******************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:94 Saturday 28 March 2026 19:31:11 -0400 (0:00:00.040) 0:08:46.642 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node12 TASK [Validate pool member thinpool settings] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2 Saturday 28 March 2026 19:31:11 -0400 (0:00:00.135) 0:08:46.777 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Check member encryption] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:97 Saturday 28 March 2026 19:31:11 -0400 (0:00:00.038) 0:08:46.815 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node12 TASK [Set test variables] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5 Saturday 28 March 2026 19:31:11 -0400 (0:00:00.133) 0:08:46.949 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10 Saturday 28 March 2026 19:31:11 -0400 (0:00:00.109) 0:08:47.058 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Validate pool member crypttab entries] *********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17 Saturday 28 March 2026 19:31:11 -0400 (0:00:00.035) 0:08:47.094 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24 Saturday 28 March 2026 19:31:12 -0400 (0:00:00.029) 0:08:47.123 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:100 Saturday 28 March 2026 19:31:12 -0400 (0:00:00.036) 0:08:47.160 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node12 TASK [Validate pool member VDO settings] *************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2 Saturday 28 March 2026 19:31:12 -0400 (0:00:00.134) 0:08:47.295 
******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Check Stratis] *********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:103 Saturday 28 March 2026 19:31:12 -0400 (0:00:00.024) 0:08:47.320 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node12 TASK [Get stratis pool information] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6 Saturday 28 March 2026 19:31:12 -0400 (0:00:00.125) 0:08:47.445 ******** ok: [managed-node12] => { "changed": false, "rc": 0 } STDOUT: {"name": "foo", "encrypted": false, "key_desc": null, "clevis_pin": null, "clevis_args": {}} STDERR: OpenSSH_9.9p1, OpenSSL 3.5.5 27 Jan 2026 debug1: Reading configuration data /root/.ssh/config debug1: Reading configuration data /etc/ssh/ssh_config debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug2: checking match for 'final all' host 10.31.11.27 originally 10.31.11.27 debug2: match not found debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config debug1: configuration requests final Match pass debug2: resolve_canonicalize: hostname 10.31.11.27 is address debug1: re-parsing configuration debug1: Reading configuration data /root/.ssh/config debug1: Reading configuration data /etc/ssh/ssh_config debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug2: checking match for 'final all' host 10.31.11.27 originally 10.31.11.27 debug2: match found debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config debug1: auto-mux: Trying existing master at '/root/.ansible/cp/e1141b92e1' debug2: fd 3 setting O_NONBLOCK debug2: mux_client_hello_exchange: master version 4 debug1: mux_client_request_session: master session id: 2 debug2: Received exit status from master 0 Shared connection to 10.31.11.27 closed. 
TASK [Print script output] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15 Saturday 28 March 2026 19:31:12 -0400 (0:00:00.614) 0:08:48.060 ******** ok: [managed-node12] => {} MSG: {'name': 'foo', 'encrypted': False, 'key_desc': None, 'clevis_pin': None, 'clevis_args': {}} TASK [Get information about Stratis] ******************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:19 Saturday 28 March 2026 19:31:13 -0400 (0:00:00.100) 0:08:48.160 ******** ok: [managed-node12] => { "ansible_facts": { "_stratis_pool_info": { "clevis_args": {}, "clevis_pin": null, "encrypted": false, "key_desc": null, "name": "foo" } }, "changed": false } TASK [Verify that the pools was created] *************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:23 Saturday 28 March 2026 19:31:13 -0400 (0:00:00.067) 0:08:48.227 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Verify that encryption is correctly set] ********************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:30 Saturday 28 March 2026 19:31:13 -0400 (0:00:00.066) 0:08:48.294 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.encryption", "skip_reason": "Conditional result was False" } TASK [Verify that Clevis/Tang encryption is correctly set] ********************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:39 Saturday 28 March 2026 19:31:13 -0400 (0:00:00.054) 0:08:48.349 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.encryption", "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:49 Saturday 28 March 2026 19:31:13 -0400 (0:00:00.072) 0:08:48.421 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_stratis_report": null }, "changed": false } TASK [Clean up test variables] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:106 Saturday 28 March 2026 19:31:13 -0400 (0:00:00.076) 0:08:48.498 ******** ok: [managed-node12] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Verify the volumes] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3 Saturday 28 March 2026 19:31:13 -0400 (0:00:00.076) 0:08:48.575 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:43 Saturday 28 March 2026 19:31:13 -0400 (0:00:00.037) 
0:08:48.613 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Clean up variable namespace] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:52 Saturday 28 March 2026 19:31:13 -0400 (0:00:00.048) 0:08:48.661 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Add the second disk to the pool] ***************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:204 Saturday 28 March 2026 19:31:13 -0400 (0:00:00.087) 0:08:48.748 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml for managed-node12 TASK [Clear facts] ************************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml:10 Saturday 28 March 2026 19:31:13 -0400 (0:00:00.083) 0:08:48.831 ******** META: facts cleared TASK [Run the role] ************************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml:24 Saturday 28 March 2026 19:31:13 -0400 (0:00:00.010) 0:08:48.842 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "__sr_failed_when is defined", "skip_reason": "Conditional result was False" } TASK [Run the role normally] *************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml:34 Saturday 28 March 2026 19:31:13 -0400 (0:00:00.045) 0:08:48.888 ******** included: fedora.linux_system_roles.storage for managed-node12 TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Saturday 28 March 2026 19:31:13 -0400 (0:00:00.096) 0:08:48.985 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node12 TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Saturday 28 March 2026 19:31:13 -0400 (0:00:00.047) 0:08:49.032 ******** ok: [managed-node12] TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Saturday 28 March 2026 19:31:15 -0400 (0:00:01.166) 0:08:50.199 ******** skipping: [managed-node12] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node12] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node12] => (item=CentOS_10.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", 
"libblockdev-crypto", "libblockdev-dm", "libblockdev-fs", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "xfsprogs", "stratisd", "stratis-cli", "{{ 'libblockdev-s390' if ansible_facts['architecture'] == 's390x' else 'libblockdev' }}", "vdo" ] }, "ansible_included_var_files": [ "/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node12] => (item=CentOS_10.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-fs", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "xfsprogs", "stratisd", "stratis-cli", "{{ 'libblockdev-s390' if ansible_facts['architecture'] == 's390x' else 'libblockdev' }}", "vdo" ] }, "ansible_included_var_files": [ "/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Saturday 28 March 2026 19:31:15 -0400 (0:00:00.069) 0:08:50.269 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "not __storage_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Saturday 28 March 2026 19:31:15 -0400 (0:00:00.031) 0:08:50.301 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "not __storage_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Saturday 28 March 2026 19:31:15 -0400 (0:00:00.023) 0:08:50.325 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Saturday 28 March 2026 19:31:15 -0400 (0:00:00.042) 0:08:50.367 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Saturday 28 March 2026 19:31:15 -0400 (0:00:00.035) 0:08:50.403 ******** redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node12 TASK [fedora.linux_system_roles.storage : Add repo key] ************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Saturday 28 March 2026 19:31:15 -0400 (0:00:00.081) 0:08:50.485 ******** skipping: [managed-node12] => { "changed": false, "false_condition": 
"_blivet_custom_repo.key is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Add blivet repo] ********************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:15 Saturday 28 March 2026 19:31:15 -0400 (0:00:00.035) 0:08:50.521 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_blivet_custom_repo.key is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:20 Saturday 28 March 2026 19:31:15 -0400 (0:00:00.035) 0:08:50.556 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:27 Saturday 28 March 2026 19:31:15 -0400 (0:00:00.048) 0:08:50.605 ******** ok: [managed-node12] => { "storage_pools | d([])": [ { "disks": [ "sda", "sdb" ], "name": "foo", "type": "stratis" } ] } TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:32 Saturday 28 March 2026 19:31:15 -0400 (0:00:00.068) 0:08:50.673 ******** ok: [managed-node12] => { "storage_volumes | d([])": [] } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37 Saturday 28 March 2026 19:31:15 -0400 (0:00:00.061) 0:08:50.735 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:50 Saturday 28 March 2026 19:31:15 -0400 (0:00:00.036) 0:08:50.772 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:56 Saturday 28 March 2026 19:31:15 -0400 (0:00:00.058) 0:08:50.830 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:76 Saturday 28 March 2026 19:31:15 -0400 (0:00:00.043) 0:08:50.874 ******** ok: [managed-node12] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": 
"NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "apt-daily.service": { "name": "apt-daily.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, 
"dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fcoe.service": { "name": "fcoe.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": 
"grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "iscsi-shutdown.service": { "name": "iscsi-shutdown.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsi.service": { "name": "iscsi.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsid.service": { "name": "iscsid.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-activation-early.service": { "name": "lvm2-activation-early.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": 
"mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_multipath.service": { "name": "modprobe@dm_multipath.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "raid-check.service": { "name": "raid-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "rbdmap.service": { "name": 
"rbdmap.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-unix-local@.service": { "name": "sshd-unix-local@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd-vsock@.service": { "name": "sshd-vsock@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, 
"sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "stratis-fstab-setup-with-network@.service": { "name": "stratis-fstab-setup-with-network@.service", "source": "systemd", "state": "unknown", "status": "static" }, "stratis-fstab-setup@.service": { "name": "stratis-fstab-setup@.service", "source": "systemd", "state": "unknown", "status": "static" }, "stratisd-min-postinitrd.service": { "name": "stratisd-min-postinitrd.service", "source": "systemd", "state": "inactive", "status": "static" }, "stratisd.service": { "name": "stratisd.service", "source": "systemd", "state": "running", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { 
"name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": 
"systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": 
"static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-userdbd.service": { "name": "systemd-userdbd.service", "source": "systemd", "state": "running", "status": "indirect" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": 
"systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:82 Saturday 28 March 2026 19:31:17 -0400 (0:00:02.126) 0:08:53.001 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:88 Saturday 28 March 2026 19:31:17 -0400 (0:00:00.050) 0:08:53.051 ******** changed: [managed-node12] => { "actions": [ { "action": "create format", "device": "/dev/sdb", "fs_type": "stratis" }, { "action": "add container member", "device": "/dev/sdb", "fs_type": null } ], "changed": true, "crypts": [], "leaves": [ "/dev/stratis/foo", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/xvda2" ], "mounts": [], "packages": [ "stratis-cli", "stratisd", "xfsprogs" ], "pools": [ { "disks": [ "sda", "sdb" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "stratis", "volumes": [] } ], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:103 Saturday 28 March 2026 19:31:23 -0400 (0:00:05.189) 0:08:58.241 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_udevadm_trigger | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ****** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:110 Saturday 28 March 2026 19:31:23 -0400 (0:00:00.022) 0:08:58.263 ******** ok: [managed-node12] => { "changed": false, "stat": { "atime": 1774740645.8607492, "attr_flags": "", "attributes": [], "block_size": 4096, 
"blocks": 8, "charset": "us-ascii", "checksum": "6aeccfbb3223a634b983c3c21792c1ba90809bb8", "ctime": 1774740641.1286864, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 230686985, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1774740641.1286864, "nlink": 1, "path": "/etc/fstab", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1408, "uid": 0, "version": "1679064280", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:115 Saturday 28 March 2026 19:31:23 -0400 (0:00:00.400) 0:08:58.664 ******** ok: [managed-node12] => { "backup": "", "changed": false } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:133 Saturday 28 March 2026 19:31:23 -0400 (0:00:00.379) 0:08:59.044 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:139 Saturday 28 March 2026 19:31:23 -0400 (0:00:00.032) 0:08:59.077 ******** ok: [managed-node12] => { "blivet_output": { "actions": [ { "action": "create format", "device": "/dev/sdb", "fs_type": "stratis" }, { "action": "add container member", "device": "/dev/sdb", "fs_type": null } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/stratis/foo", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/xvda2" ], "mounts": [], "packages": [ "stratis-cli", "stratisd", "xfsprogs" ], "pools": [ { "disks": [ "sda", "sdb" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "stratis", "volumes": [] } ], "volumes": [] } } TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148 Saturday 28 March 2026 19:31:24 -0400 (0:00:00.026) 0:08:59.103 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda", "sdb" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, 
"shared": false, "state": "present", "type": "stratis", "volumes": [] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:152 Saturday 28 March 2026 19:31:24 -0400 (0:00:00.024) 0:08:59.127 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] ************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:168 Saturday 28 March 2026 19:31:24 -0400 (0:00:00.022) 0:08:59.150 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:179 Saturday 28 March 2026 19:31:24 -0400 (0:00:00.017) 0:08:59.168 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "blivet_output['mounts'] | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set up new/current mounts] *********** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:184 Saturday 28 March 2026 19:31:24 -0400 (0:00:00.020) 0:08:59.188 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195 Saturday 28 March 2026 19:31:24 -0400 (0:00:00.017) 0:08:59.205 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:207 Saturday 28 March 2026 19:31:24 -0400 (0:00:00.017) 0:08:59.223 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "blivet_output['mounts'] | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:215 Saturday 28 March 2026 19:31:24 -0400 (0:00:00.024) 0:08:59.248 ******** ok: [managed-node12] => { "changed": false, "stat": { "atime": 1774737762.6954868, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1774342704.475, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 4194435, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1774342323.629, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "1224473831", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": 
false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:220 Saturday 28 March 2026 19:31:24 -0400 (0:00:00.398) 0:08:59.646 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:242 Saturday 28 March 2026 19:31:24 -0400 (0:00:00.016) 0:08:59.663 ******** ok: [managed-node12] TASK [Verify role results - 9] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:212 Saturday 28 March 2026 19:31:25 -0400 (0:00:00.998) 0:09:00.661 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node12 TASK [Print out pool information] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2 Saturday 28 March 2026 19:31:25 -0400 (0:00:00.044) 0:09:00.705 ******** ok: [managed-node12] => { "_storage_pools_list": [ { "disks": [ "sda", "sdb" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "stratis", "volumes": [] } ] } TASK [Print out volume information] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7 Saturday 28 March 2026 19:31:25 -0400 (0:00:00.030) 0:09:00.736 ******** skipping: [managed-node12] => { "false_condition": "_storage_volumes_list | length > 0" } TASK [Collect info about the volumes.] 
***************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15 Saturday 28 March 2026 19:31:25 -0400 (0:00:00.022) 0:09:00.758 ******** ok: [managed-node12] => { "changed": false, "info": { "/dev/loop0": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/loop0", "size": "0B", "type": "loop", "uuid": "" }, "/dev/mapper/stratis-1-private-4348e4e826364b779eb680c1a13b593d-flex-mdv": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/mapper/stratis-1-private-4348e4e826364b779eb680c1a13b593d-flex-mdv", "size": "512M", "type": "stratis", "uuid": "" }, "/dev/mapper/stratis-1-private-4348e4e826364b779eb680c1a13b593d-flex-thindata": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/mapper/stratis-1-private-4348e4e826364b779eb680c1a13b593d-flex-thindata", "size": "18.9G", "type": "stratis", "uuid": "" }, "/dev/mapper/stratis-1-private-4348e4e826364b779eb680c1a13b593d-flex-thinmeta": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/mapper/stratis-1-private-4348e4e826364b779eb680c1a13b593d-flex-thinmeta", "size": "9M", "type": "stratis", "uuid": "" }, "/dev/mapper/stratis-1-private-4348e4e826364b779eb680c1a13b593d-physical-cache": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/mapper/stratis-1-private-4348e4e826364b779eb680c1a13b593d-physical-cache", "size": "19.4G", "type": "stratis", "uuid": "" }, "/dev/mapper/stratis-1-private-4348e4e826364b779eb680c1a13b593d-physical-originsub": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/mapper/stratis-1-private-4348e4e826364b779eb680c1a13b593d-physical-originsub", "size": "19.4G", "type": "stratis", "uuid": "" }, "/dev/mapper/stratis-1-private-4348e4e826364b779eb680c1a13b593d-thinpool-pool": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/mapper/stratis-1-private-4348e4e826364b779eb680c1a13b593d-thinpool-pool", "size": "18.9G", "type": "stratis", "uuid": "" }, "/dev/sda": { "fstype": "stratis", "label": "", "mountpoint": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "6ef5557e-5daa-4322-85cd-5a8e73a322f6" }, "/dev/sdb": { "fstype": "stratis", "label": "", "mountpoint": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "021bf074-a9ab-4abf-bbd9-6c26f88c4e2b" }, "/dev/sdc": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda1", "size": "1M", "type": "partition", "uuid": "" }, "/dev/xvda2": { "fstype": "xfs", "label": "", "mountpoint": "/", "name": "/dev/xvda2", 
"size": "250G", "type": "partition", "uuid": "94fc577e-f1df-44bb-8e86-63b9a68f8f7f" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20 Saturday 28 March 2026 19:31:26 -0400 (0:00:00.406) 0:09:01.165 ******** ok: [managed-node12] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.002966", "end": "2026-03-28 19:31:26.428304", "rc": 0, "start": "2026-03-28 19:31:26.425338" } STDOUT: # system_role:storage # # /etc/fstab # Created by anaconda on Tue Mar 24 08:52:03 2026 # # Accessible filesystems, by reference, are maintained under '/dev/disk/'. # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info. # # After editing this file, run 'systemctl daemon-reload' to update systemd # units generated from this file. # UUID=94fc577e-f1df-44bb-8e86-63b9a68f8f7f / xfs defaults 0 0 ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25 Saturday 28 March 2026 19:31:26 -0400 (0:00:00.424) 0:09:01.590 ******** ok: [managed-node12] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.003039", "end": "2026-03-28 19:31:26.806201", "failed_when_result": false, "rc": 0, "start": "2026-03-28 19:31:26.803162" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34 Saturday 28 March 2026 19:31:26 -0400 (0:00:00.389) 0:09:01.980 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node12 => (item={'disks': ['sda', 'sdb'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'present', 'type': 'stratis', 'volumes': []}) TASK [Set _storage_pool_tests] ************************************************* task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5 Saturday 28 March 2026 19:31:26 -0400 (0:00:00.064) 0:09:02.045 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [Get VG shared value status] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18 Saturday 28 March 2026 19:31:26 -0400 (0:00:00.036) 0:09:02.081 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'", "skip_reason": "Conditional result was False" } TASK [Verify that VG shared value checks out] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24 Saturday 28 March 2026 19:31:27 -0400 (0:00:00.020) 0:09:02.101 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'", "skip_reason": "Conditional result was False" } TASK [Verify pool subset] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34 Saturday 28 March 2026 19:31:27 -0400 (0:00:00.023) 0:09:02.125 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node12 => (item=members) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node12 => (item=volumes) TASK [Set test variables] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2 Saturday 28 March 2026 19:31:27 -0400 (0:00:00.058) 0:09:02.183 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Get the canonical device path for each member device] ******************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8 Saturday 28 March 2026 19:31:27 -0400 (0:00:00.022) 0:09:02.206 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Set pvs lvm length] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17 Saturday 28 March 2026 19:31:27 -0400 (0:00:00.024) 0:09:02.230 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Set pool pvs] ************************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22 Saturday 28 March 2026 19:31:27 -0400 (0:00:00.020) 0:09:02.251 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Verify PV count] ********************************************************* task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27 Saturday 28 March 2026 19:31:27 -0400 (0:00:00.090) 0:09:02.341 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Set expected pv type] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36 Saturday 28 March 2026 19:31:27 -0400 (0:00:00.026) 0:09:02.368 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Set expected pv type - 2] ************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41 Saturday 28 March 2026 19:31:27 -0400 (0:00:00.030) 0:09:02.398 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm' and not storage_test_pool.encryption", "skip_reason": "Conditional result was False" } TASK [Set expected pv type - 3] ************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46 Saturday 28 March 2026 19:31:27 -0400 (0:00:00.026) 0:09:02.425 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:55 Saturday 28 March 2026 19:31:27 -0400 (0:00:00.025) 0:09:02.451 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Check that blivet supports PV grow to fill] ****************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:68 Saturday 28 March 2026 19:31:27 -0400 (0:00:00.018) 0:09:02.469 ******** ok: [managed-node12] => { "changed": false, "failed_when_result": false, "rc": 0 } STDERR: OpenSSH_9.9p1, OpenSSL 3.5.5 27 Jan 2026 debug1: Reading configuration data /root/.ssh/config debug1: Reading configuration data /etc/ssh/ssh_config debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug2: checking match for 'final all' host 10.31.11.27 originally 10.31.11.27 debug2: match not found debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config debug1: configuration requests final Match pass debug2: resolve_canonicalize: hostname 10.31.11.27 is address debug1: re-parsing configuration debug1: Reading configuration data /root/.ssh/config debug1: Reading configuration data /etc/ssh/ssh_config debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug2: checking match for 'final all' host 10.31.11.27 originally 10.31.11.27 debug2: match found debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config debug1: auto-mux: Trying existing master at '/root/.ansible/cp/e1141b92e1' debug2: fd 3 setting O_NONBLOCK debug2: mux_client_hello_exchange: master version 4 debug1: mux_client_request_session: master session id: 2 debug2: Received exit 
status from master 0 Shared connection to 10.31.11.27 closed. TASK [Verify that PVs fill the whole devices when they should] ***************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:78 Saturday 28 March 2026 19:31:27 -0400 (0:00:00.437) 0:09:02.907 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Check MD RAID] *********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:88 Saturday 28 March 2026 19:31:27 -0400 (0:00:00.026) 0:09:02.934 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node12 TASK [Get information about RAID] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8 Saturday 28 March 2026 19:31:27 -0400 (0:00:00.069) 0:09:03.004 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14 Saturday 28 March 2026 19:31:27 -0400 (0:00:00.034) 0:09:03.038 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19 Saturday 28 March 2026 19:31:27 -0400 (0:00:00.033) 0:09:03.072 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24 Saturday 28 March 2026 19:31:28 -0400 (0:00:00.033) 0:09:03.106 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set md chunk size regex] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29 Saturday 28 March 2026 19:31:28 -0400 (0:00:00.032) 0:09:03.138 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37 Saturday 28 March 2026 19:31:28 -0400 (0:00:00.020) 0:09:03.159 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46 Saturday 28 March 2026 19:31:28 -0400 (0:00:00.022) 0:09:03.182 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55 Saturday 28 March 2026 19:31:28 -0400 (0:00:00.023) 0:09:03.206 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64 Saturday 28 March 2026 19:31:28 -0400 (0:00:00.020) 0:09:03.226 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74 Saturday 28 March 2026 19:31:28 -0400 (0:00:00.027) 0:09:03.253 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83 Saturday 28 March 2026 19:31:28 -0400 (0:00:00.023) 0:09:03.277 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_chunk_size_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:91 Saturday 28 March 2026 19:31:28 -0400 (0:00:00.048) 0:09:03.326 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node12 TASK [Validate pool member LVM RAID settings] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2 Saturday 28 March 2026 19:31:28 -0400 (0:00:00.112) 0:09:03.438 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Check Thin Pools] ******************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:94 Saturday 28 March 2026 19:31:28 -0400 (0:00:00.031) 0:09:03.469 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node12 TASK [Validate pool member thinpool settings] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2 Saturday 28 March 2026 
19:31:28 -0400 (0:00:00.053) 0:09:03.523 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Check member encryption] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:97 Saturday 28 March 2026 19:31:28 -0400 (0:00:00.022) 0:09:03.545 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node12 TASK [Set test variables] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5 Saturday 28 March 2026 19:31:28 -0400 (0:00:00.079) 0:09:03.625 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10 Saturday 28 March 2026 19:31:28 -0400 (0:00:00.040) 0:09:03.665 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Validate pool member crypttab entries] *********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17 Saturday 28 March 2026 19:31:28 -0400 (0:00:00.024) 0:09:03.690 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24 Saturday 28 March 2026 19:31:28 -0400 (0:00:00.021) 0:09:03.712 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:100 Saturday 28 March 2026 19:31:28 -0400 (0:00:00.034) 0:09:03.747 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node12 TASK [Validate pool member VDO settings] *************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2 Saturday 28 March 2026 19:31:28 -0400 (0:00:00.133) 0:09:03.880 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Check Stratis] *********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:103 Saturday 28 March 2026 19:31:28 -0400 (0:00:00.047) 0:09:03.928 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node12 TASK [Get stratis pool information] ******************************************** task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6 Saturday 28 March 2026 19:31:28 -0400 (0:00:00.152) 0:09:04.081 ******** ok: [managed-node12] => { "changed": false, "rc": 0 } STDOUT: {"name": "foo", "encrypted": false, "key_desc": null, "clevis_pin": null, "clevis_args": {}} STDERR: OpenSSH_9.9p1, OpenSSL 3.5.5 27 Jan 2026 debug1: Reading configuration data /root/.ssh/config debug1: Reading configuration data /etc/ssh/ssh_config debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug2: checking match for 'final all' host 10.31.11.27 originally 10.31.11.27 debug2: match not found debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config debug1: configuration requests final Match pass debug2: resolve_canonicalize: hostname 10.31.11.27 is address debug1: re-parsing configuration debug1: Reading configuration data /root/.ssh/config debug1: Reading configuration data /etc/ssh/ssh_config debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug2: checking match for 'final all' host 10.31.11.27 originally 10.31.11.27 debug2: match found debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config debug1: auto-mux: Trying existing master at '/root/.ansible/cp/e1141b92e1' debug2: fd 3 setting O_NONBLOCK debug2: mux_client_hello_exchange: master version 4 debug1: mux_client_request_session: master session id: 2 debug2: Received exit status from master 0 Shared connection to 10.31.11.27 closed. TASK [Print script output] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15 Saturday 28 March 2026 19:31:29 -0400 (0:00:00.570) 0:09:04.652 ******** ok: [managed-node12] => {} MSG: {'name': 'foo', 'encrypted': False, 'key_desc': None, 'clevis_pin': None, 'clevis_args': {}} TASK [Get information about Stratis] ******************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:19 Saturday 28 March 2026 19:31:29 -0400 (0:00:00.077) 0:09:04.729 ******** ok: [managed-node12] => { "ansible_facts": { "_stratis_pool_info": { "clevis_args": {}, "clevis_pin": null, "encrypted": false, "key_desc": null, "name": "foo" } }, "changed": false } TASK [Verify that the pools was created] *************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:23 Saturday 28 March 2026 19:31:29 -0400 (0:00:00.072) 0:09:04.801 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Verify that encryption is correctly set] ********************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:30 Saturday 28 March 2026 19:31:29 -0400 (0:00:00.058) 0:09:04.859 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.encryption", "skip_reason": "Conditional result was False" } TASK [Verify that Clevis/Tang encryption is correctly set] ********************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:39 Saturday 28 March 2026 19:31:29 -0400 (0:00:00.065) 0:09:04.925 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.encryption", 
"skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:49 Saturday 28 March 2026 19:31:29 -0400 (0:00:00.077) 0:09:05.002 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_stratis_report": null }, "changed": false } TASK [Clean up test variables] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:106 Saturday 28 March 2026 19:31:29 -0400 (0:00:00.066) 0:09:05.069 ******** ok: [managed-node12] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Verify the volumes] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3 Saturday 28 March 2026 19:31:30 -0400 (0:00:00.044) 0:09:05.114 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:43 Saturday 28 March 2026 19:31:30 -0400 (0:00:00.025) 0:09:05.139 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Clean up variable namespace] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:52 Saturday 28 March 2026 19:31:30 -0400 (0:00:00.026) 0:09:05.166 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Clean up - 3] ************************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:217 Saturday 28 March 2026 19:31:30 -0400 (0:00:00.035) 0:09:05.201 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml for managed-node12 TASK [Clear facts] ************************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml:10 Saturday 28 March 2026 19:31:30 -0400 (0:00:00.046) 0:09:05.247 ******** META: facts cleared TASK [Run the role] ************************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml:24 Saturday 28 March 2026 19:31:30 -0400 (0:00:00.006) 0:09:05.254 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "__sr_failed_when is defined", "skip_reason": "Conditional result was False" } TASK [Run the role normally] *************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml:34 Saturday 28 March 2026 19:31:30 -0400 (0:00:00.030) 0:09:05.285 ******** included: 
fedora.linux_system_roles.storage for managed-node12 TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Saturday 28 March 2026 19:31:30 -0400 (0:00:00.045) 0:09:05.331 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node12 TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Saturday 28 March 2026 19:31:30 -0400 (0:00:00.044) 0:09:05.375 ******** ok: [managed-node12] TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Saturday 28 March 2026 19:31:31 -0400 (0:00:01.114) 0:09:06.489 ******** skipping: [managed-node12] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node12] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node12] => (item=CentOS_10.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-fs", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "xfsprogs", "stratisd", "stratis-cli", "{{ 'libblockdev-s390' if ansible_facts['architecture'] == 's390x' else 'libblockdev' }}", "vdo" ] }, "ansible_included_var_files": [ "/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node12] => (item=CentOS_10.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-fs", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "xfsprogs", "stratisd", "stratis-cli", "{{ 'libblockdev-s390' if ansible_facts['architecture'] == 's390x' else 'libblockdev' }}", "vdo" ] }, "ansible_included_var_files": [ "/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Saturday 28 March 2026 19:31:31 -0400 (0:00:00.136) 0:09:06.626 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "not __storage_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Saturday 28 March 2026 19:31:31 -0400 (0:00:00.052) 0:09:06.678 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "not __storage_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task 
path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Saturday 28 March 2026 19:31:31 -0400 (0:00:00.035) 0:09:06.714 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Saturday 28 March 2026 19:31:31 -0400 (0:00:00.056) 0:09:06.770 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Saturday 28 March 2026 19:31:31 -0400 (0:00:00.037) 0:09:06.807 ******** redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node12 TASK [fedora.linux_system_roles.storage : Add repo key] ************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Saturday 28 March 2026 19:31:31 -0400 (0:00:00.086) 0:09:06.894 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_blivet_custom_repo.key is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Add blivet repo] ********************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:15 Saturday 28 March 2026 19:31:31 -0400 (0:00:00.038) 0:09:06.933 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_blivet_custom_repo.key is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:20 Saturday 28 March 2026 19:31:31 -0400 (0:00:00.035) 0:09:06.969 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:27 Saturday 28 March 2026 19:31:31 -0400 (0:00:00.063) 0:09:07.033 ******** ok: [managed-node12] => { "storage_pools | d([])": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "name": "foo", "state": "absent", "type": "stratis", "volumes": [ { "mount_point": "/opt/test1", "name": "test1", "size": "4g", "state": "absent" } ] } ] } TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:32 Saturday 28 March 2026 19:31:31 -0400 (0:00:00.046) 0:09:07.079 ******** ok: [managed-node12] => { "storage_volumes | d([])": [] } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: 
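The storage_pools value printed just above is the spec this cleanup pass hands to the role: the Stratis pool foo on disks sda through sdi and its test1 volume are both requested with state: absent, so the role tears them down rather than creating anything. As a rough standalone sketch (the hosts/become play header is an assumption for illustration; only the storage_pools contents mirror the values shown above), the same request could be written as:

    # Illustrative sketch: remove the Stratis pool "foo" and its volume via the
    # storage role. Only the storage_pools values come from the output above.
    - hosts: managed-node12        # placeholder target
      become: true
      vars:
        storage_pools:
          - name: foo
            type: stratis
            disks: [sda, sdb, sdc, sdd, sde, sdf, sdg, sdh, sdi]
            state: absent
            volumes:
              - name: test1
                size: "4g"
                mount_point: /opt/test1
                state: absent
      roles:
        - fedora.linux_system_roles.storage

The role reports changed only when blivet has concrete actions to apply, which is what the later "Manage the pools and volumes to match the specified state" task shows for this run.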
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37 Saturday 28 March 2026 19:31:32 -0400 (0:00:00.039) 0:09:07.118 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:50 Saturday 28 March 2026 19:31:32 -0400 (0:00:00.058) 0:09:07.177 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:56 Saturday 28 March 2026 19:31:32 -0400 (0:00:00.062) 0:09:07.239 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:76 Saturday 28 March 2026 19:31:32 -0400 (0:00:00.064) 0:09:07.304 ******** ok: [managed-node12] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "apt-daily.service": { "name": "apt-daily.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, 
"cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": 
"dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fcoe.service": { "name": "fcoe.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "iscsi-shutdown.service": { "name": "iscsi-shutdown.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsi.service": { "name": "iscsi.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsid.service": { "name": "iscsid.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": 
"static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-activation-early.service": { "name": "lvm2-activation-early.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_multipath.service": { "name": "modprobe@dm_multipath.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" 
}, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "raid-check.service": { "name": "raid-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "rbdmap.service": { "name": "rbdmap.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": 
"selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-unix-local@.service": { "name": "sshd-unix-local@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd-vsock@.service": { "name": "sshd-vsock@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "stratis-fstab-setup-with-network@.service": { "name": "stratis-fstab-setup-with-network@.service", "source": "systemd", "state": "unknown", "status": "static" }, "stratis-fstab-setup@.service": { "name": "stratis-fstab-setup@.service", "source": "systemd", "state": "unknown", "status": "static" }, "stratisd-min-postinitrd.service": { "name": "stratisd-min-postinitrd.service", "source": "systemd", "state": "inactive", "status": "static" }, "stratisd.service": { "name": "stratisd.service", "source": "systemd", "state": "running", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", 
"source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" 
}, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-userdbd.service": { "name": "systemd-userdbd.service", "source": "systemd", "state": "running", "status": "indirect" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:82 Saturday 28 March 2026 19:31:34 -0400 (0:00:02.193) 0:09:09.498 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:88 Saturday 28 March 2026 19:31:34 -0400 (0:00:00.073) 0:09:09.571 ******** changed: [managed-node12] => { "actions": [ { "action": "destroy device", "device": 
"/dev/stratis/foo", "fs_type": null }, { "action": "destroy format", "device": "/dev/sda", "fs_type": "stratis" }, { "action": "destroy format", "device": "/dev/sdb", "fs_type": "stratis" } ], "changed": true, "crypts": [], "leaves": [ "/dev/sda", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/xvda2" ], "mounts": [], "packages": [ "xfsprogs" ], "pools": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "stratis", "volumes": [ { "_device": "", "_mount_id": "", "_raw_device": "", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:103 Saturday 28 March 2026 19:31:40 -0400 (0:00:06.149) 0:09:15.721 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_udevadm_trigger | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ****** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:110 Saturday 28 March 2026 19:31:40 -0400 (0:00:00.032) 0:09:15.754 ******** ok: [managed-node12] => { "changed": false, "stat": { "atime": 1774740645.8607492, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "6aeccfbb3223a634b983c3c21792c1ba90809bb8", "ctime": 1774740641.1286864, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 230686985, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1774740641.1286864, "nlink": 1, "path": "/etc/fstab", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1408, "uid": 0, "version": "1679064280", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": 
false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:115 Saturday 28 March 2026 19:31:41 -0400 (0:00:00.409) 0:09:16.163 ******** ok: [managed-node12] => { "backup": "", "changed": false } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:133 Saturday 28 March 2026 19:31:41 -0400 (0:00:00.384) 0:09:16.548 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:139 Saturday 28 March 2026 19:31:41 -0400 (0:00:00.030) 0:09:16.578 ******** ok: [managed-node12] => { "blivet_output": { "actions": [ { "action": "destroy device", "device": "/dev/stratis/foo", "fs_type": null }, { "action": "destroy format", "device": "/dev/sda", "fs_type": "stratis" }, { "action": "destroy format", "device": "/dev/sdb", "fs_type": "stratis" } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/sda", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/xvda2" ], "mounts": [], "packages": [ "xfsprogs" ], "pools": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "stratis", "volumes": [ { "_device": "", "_mount_id": "", "_raw_device": "", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148 Saturday 28 March 2026 19:31:41 -0400 (0:00:00.027) 0:09:16.605 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], 
"encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "stratis", "volumes": [ { "_device": "", "_mount_id": "", "_raw_device": "", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:152 Saturday 28 March 2026 19:31:41 -0400 (0:00:00.025) 0:09:16.630 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] ************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:168 Saturday 28 March 2026 19:31:41 -0400 (0:00:00.024) 0:09:16.654 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:179 Saturday 28 March 2026 19:31:41 -0400 (0:00:00.018) 0:09:16.673 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "blivet_output['mounts'] | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set up new/current mounts] *********** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:184 Saturday 28 March 2026 19:31:41 -0400 (0:00:00.021) 0:09:16.694 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195 Saturday 28 March 2026 19:31:41 -0400 (0:00:00.021) 0:09:16.716 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:207 Saturday 28 March 2026 19:31:41 -0400 (0:00:00.022) 0:09:16.738 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "blivet_output['mounts'] | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:215 Saturday 28 March 2026 19:31:41 -0400 (0:00:00.038) 0:09:16.776 ******** ok: [managed-node12] => { "changed": false, "stat": { "atime": 1774737762.6954868, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1774342704.475, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 4194435, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1774342323.629, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "1224473831", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:220 Saturday 28 March 2026 19:31:42 -0400 (0:00:00.446) 0:09:17.223 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:242 Saturday 28 March 2026 19:31:42 -0400 (0:00:00.058) 0:09:17.282 ******** ok: [managed-node12] TASK [Verify role results - 10] ************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:231 Saturday 28 March 2026 19:31:43 -0400 (0:00:01.038) 0:09:18.320 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node12 TASK [Print out pool information] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2 Saturday 28 March 2026 19:31:43 -0400 (0:00:00.032) 0:09:18.353 ******** ok: [managed-node12] => { "_storage_pools_list": [ { "disks": [ "sda", "sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh", "sdi" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "stratis", "volumes": [ { "_device": "", "_mount_id": "", "_raw_device": "", "cache_devices": [], "cache_mode": null, 
"cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } ] } ] } TASK [Print out volume information] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7 Saturday 28 March 2026 19:31:43 -0400 (0:00:00.027) 0:09:18.381 ******** skipping: [managed-node12] => { "false_condition": "_storage_volumes_list | length > 0" } TASK [Collect info about the volumes.] ***************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15 Saturday 28 March 2026 19:31:43 -0400 (0:00:00.022) 0:09:18.403 ******** ok: [managed-node12] => { "changed": false, "info": { "/dev/loop0": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/loop0", "size": "0B", "type": "loop", "uuid": "" }, "/dev/sda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdb": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda1", "size": "1M", "type": "partition", "uuid": "" }, "/dev/xvda2": { "fstype": "xfs", "label": "", "mountpoint": "/", "name": "/dev/xvda2", "size": "250G", "type": "partition", "uuid": "94fc577e-f1df-44bb-8e86-63b9a68f8f7f" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20 Saturday 28 March 2026 19:31:43 -0400 (0:00:00.374) 0:09:18.778 ******** ok: [managed-node12] => { 
"changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.003143", "end": "2026-03-28 19:31:43.996048", "rc": 0, "start": "2026-03-28 19:31:43.992905" } STDOUT: # system_role:storage # # /etc/fstab # Created by anaconda on Tue Mar 24 08:52:03 2026 # # Accessible filesystems, by reference, are maintained under '/dev/disk/'. # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info. # # After editing this file, run 'systemctl daemon-reload' to update systemd # units generated from this file. # UUID=94fc577e-f1df-44bb-8e86-63b9a68f8f7f / xfs defaults 0 0 ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25 Saturday 28 March 2026 19:31:44 -0400 (0:00:00.371) 0:09:19.149 ******** ok: [managed-node12] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.002918", "end": "2026-03-28 19:31:44.364102", "failed_when_result": false, "rc": 0, "start": "2026-03-28 19:31:44.361184" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34 Saturday 28 March 2026 19:31:44 -0400 (0:00:00.367) 0:09:19.517 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node12 => (item={'disks': ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf', 'sdg', 'sdh', 'sdi'], 'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'absent', 'type': 'stratis', 'volumes': [{'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 
'deduplication': None, 'part_type': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '', '_raw_device': '', '_mount_id': ''}]}) TASK [Set _storage_pool_tests] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5 Saturday 28 March 2026 19:31:44 -0400 (0:00:00.039) 0:09:19.557 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [Get VG shared value status] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18 Saturday 28 March 2026 19:31:44 -0400 (0:00:00.021) 0:09:19.579 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'", "skip_reason": "Conditional result was False" } TASK [Verify that VG shared value checks out] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24 Saturday 28 March 2026 19:31:44 -0400 (0:00:00.019) 0:09:19.598 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'", "skip_reason": "Conditional result was False" } TASK [Verify pool subset] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34 Saturday 28 March 2026 19:31:44 -0400 (0:00:00.020) 0:09:19.619 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node12 => (item=members) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node12 => (item=volumes) TASK [Set test variables] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2 Saturday 28 March 2026 19:31:44 -0400 (0:00:00.055) 0:09:19.674 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Get the canonical device path for each member device] ******************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8 Saturday 28 March 2026 19:31:44 -0400 (0:00:00.024) 0:09:19.698 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Set pvs lvm length] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17 Saturday 28 March 2026 19:31:44 -0400 (0:00:00.022) 0:09:19.720 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional 
result was False" } TASK [Set pool pvs] ************************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22 Saturday 28 March 2026 19:31:44 -0400 (0:00:00.037) 0:09:19.758 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Verify PV count] ********************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27 Saturday 28 March 2026 19:31:44 -0400 (0:00:00.032) 0:09:19.791 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Set expected pv type] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36 Saturday 28 March 2026 19:31:44 -0400 (0:00:00.044) 0:09:19.836 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Set expected pv type - 2] ************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41 Saturday 28 March 2026 19:31:44 -0400 (0:00:00.039) 0:09:19.875 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm' and not storage_test_pool.encryption", "skip_reason": "Conditional result was False" } TASK [Set expected pv type - 3] ************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46 Saturday 28 March 2026 19:31:44 -0400 (0:00:00.033) 0:09:19.908 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:55 Saturday 28 March 2026 19:31:44 -0400 (0:00:00.034) 0:09:19.943 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Check that blivet supports PV grow to fill] ****************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:68 Saturday 28 March 2026 19:31:44 -0400 (0:00:00.022) 0:09:19.965 ******** ok: [managed-node12] => { "changed": false, "failed_when_result": false, "rc": 0 } STDERR: OpenSSH_9.9p1, OpenSSL 3.5.5 27 Jan 2026 debug1: Reading configuration data /root/.ssh/config debug1: Reading configuration data /etc/ssh/ssh_config debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug2: checking match for 'final all' host 10.31.11.27 originally 10.31.11.27 debug2: match not found debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config debug1: configuration requests final Match pass debug2: resolve_canonicalize: hostname 10.31.11.27 is address debug1: re-parsing configuration debug1: Reading configuration data /root/.ssh/config 
debug1: Reading configuration data /etc/ssh/ssh_config debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug2: checking match for 'final all' host 10.31.11.27 originally 10.31.11.27 debug2: match found debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config debug1: auto-mux: Trying existing master at '/root/.ansible/cp/e1141b92e1' debug2: fd 3 setting O_NONBLOCK debug2: mux_client_hello_exchange: master version 4 debug1: mux_client_request_session: master session id: 2 debug2: Received exit status from master 0 Shared connection to 10.31.11.27 closed. TASK [Verify that PVs fill the whole devices when they should] ***************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:78 Saturday 28 March 2026 19:31:45 -0400 (0:00:00.448) 0:09:20.414 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Check MD RAID] *********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:88 Saturday 28 March 2026 19:31:45 -0400 (0:00:00.018) 0:09:20.433 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node12 TASK [Get information about RAID] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8 Saturday 28 March 2026 19:31:45 -0400 (0:00:00.039) 0:09:20.472 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14 Saturday 28 March 2026 19:31:45 -0400 (0:00:00.019) 0:09:20.491 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19 Saturday 28 March 2026 19:31:45 -0400 (0:00:00.018) 0:09:20.510 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24 Saturday 28 March 2026 19:31:45 -0400 (0:00:00.018) 0:09:20.528 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set md chunk size regex] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29 Saturday 28 March 2026 19:31:45 -0400 (0:00:00.022) 0:09:20.551 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] 
**************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37 Saturday 28 March 2026 19:31:45 -0400 (0:00:00.032) 0:09:20.583 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46 Saturday 28 March 2026 19:31:45 -0400 (0:00:00.038) 0:09:20.621 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55 Saturday 28 March 2026 19:31:45 -0400 (0:00:00.040) 0:09:20.661 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64 Saturday 28 March 2026 19:31:45 -0400 (0:00:00.037) 0:09:20.699 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74 Saturday 28 March 2026 19:31:45 -0400 (0:00:00.047) 0:09:20.746 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83 Saturday 28 March 2026 19:31:45 -0400 (0:00:00.035) 0:09:20.782 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_chunk_size_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:91 Saturday 28 March 2026 19:31:45 -0400 (0:00:00.039) 0:09:20.821 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node12 TASK [Validate pool member LVM RAID settings] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2 Saturday 28 March 2026 19:31:45 -0400 (0:00:00.077) 0:09:20.899 ******** skipping: [managed-node12] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 
'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'part_type': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '', '_raw_device': '', '_mount_id': ''}) => { "ansible_loop_var": "storage_test_lvmraid_volume", "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False", "storage_test_lvmraid_volume": { "_device": "", "_mount_id": "", "_raw_device": "", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } } skipping: [managed-node12] => { "changed": false } MSG: All items skipped TASK [Check Thin Pools] ******************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:94 Saturday 28 March 2026 19:31:45 -0400 (0:00:00.046) 0:09:20.945 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node12 TASK [Validate pool member thinpool settings] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2 Saturday 28 March 2026 19:31:45 -0400 (0:00:00.070) 0:09:21.016 ******** skipping: [managed-node12] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'part_type': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 
'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '', '_raw_device': '', '_mount_id': ''}) => { "ansible_loop_var": "storage_test_thin_volume", "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False", "storage_test_thin_volume": { "_device": "", "_mount_id": "", "_raw_device": "", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } } skipping: [managed-node12] => { "changed": false } MSG: All items skipped TASK [Check member encryption] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:97 Saturday 28 March 2026 19:31:45 -0400 (0:00:00.049) 0:09:21.065 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node12 TASK [Set test variables] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5 Saturday 28 March 2026 19:31:46 -0400 (0:00:00.094) 0:09:21.159 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10 Saturday 28 March 2026 19:31:46 -0400 (0:00:00.045) 0:09:21.205 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Validate pool member crypttab entries] *********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17 Saturday 28 March 2026 19:31:46 -0400 (0:00:00.017) 0:09:21.223 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24 Saturday 28 March 2026 19:31:46 -0400 (0:00:00.023) 0:09:21.246 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] 
*************************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:100 Saturday 28 March 2026 19:31:46 -0400 (0:00:00.025) 0:09:21.271 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node12 TASK [Validate pool member VDO settings] *************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2 Saturday 28 March 2026 19:31:46 -0400 (0:00:00.057) 0:09:21.329 ******** skipping: [managed-node12] => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'part_type': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '', '_raw_device': '', '_mount_id': ''}) => { "ansible_loop_var": "storage_test_vdo_volume", "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False", "storage_test_vdo_volume": { "_device": "", "_mount_id": "", "_raw_device": "", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/opt/test1", "mount_user": null, "name": "test1", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "4g", "state": "absent", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "stratis", "vdo_pool_size": null } } skipping: [managed-node12] => { "changed": false } MSG: All items skipped TASK [Check Stratis] *********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:103 Saturday 28 March 2026 19:31:46 -0400 (0:00:00.032) 0:09:21.362 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node12 TASK [Get stratis pool information] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6 
Saturday 28 March 2026 19:31:46 -0400 (0:00:00.050) 0:09:21.413 ******** ok: [managed-node12] => { "changed": false, "rc": 0 } STDOUT: null STDERR: OpenSSH_9.9p1, OpenSSL 3.5.5 27 Jan 2026 debug1: Reading configuration data /root/.ssh/config debug1: Reading configuration data /etc/ssh/ssh_config debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug2: checking match for 'final all' host 10.31.11.27 originally 10.31.11.27 debug2: match not found debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config debug1: configuration requests final Match pass debug2: resolve_canonicalize: hostname 10.31.11.27 is address debug1: re-parsing configuration debug1: Reading configuration data /root/.ssh/config debug1: Reading configuration data /etc/ssh/ssh_config debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug2: checking match for 'final all' host 10.31.11.27 originally 10.31.11.27 debug2: match found debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config debug1: auto-mux: Trying existing master at '/root/.ansible/cp/e1141b92e1' debug2: fd 3 setting O_NONBLOCK debug2: mux_client_hello_exchange: master version 4 debug1: mux_client_request_session: master session id: 2 debug2: Received exit status from master 0 Shared connection to 10.31.11.27 closed. TASK [Print script output] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15 Saturday 28 March 2026 19:31:46 -0400 (0:00:00.564) 0:09:21.977 ******** ok: [managed-node12] => {} MSG: null TASK [Get information about Stratis] ******************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:19 Saturday 28 March 2026 19:31:46 -0400 (0:00:00.031) 0:09:22.008 ******** ok: [managed-node12] => { "ansible_facts": { "_stratis_pool_info": "" }, "changed": false } TASK [Verify that the pools was created] *************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:23 Saturday 28 March 2026 19:31:46 -0400 (0:00:00.028) 0:09:22.036 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.state == 'present'", "skip_reason": "Conditional result was False" } TASK [Verify that encryption is correctly set] ********************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:30 Saturday 28 March 2026 19:31:46 -0400 (0:00:00.024) 0:09:22.061 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.state == 'present'", "skip_reason": "Conditional result was False" } TASK [Verify that Clevis/Tang encryption is correctly set] ********************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:39 Saturday 28 March 2026 19:31:46 -0400 (0:00:00.026) 0:09:22.087 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.state == 'present'", "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:49 Saturday 28 March 2026 19:31:47 
-0400 (0:00:00.022) 0:09:22.110 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_stratis_report": null }, "changed": false } TASK [Clean up test variables] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:106 Saturday 28 March 2026 19:31:47 -0400 (0:00:00.024) 0:09:22.135 ******** ok: [managed-node12] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Verify the volumes] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3 Saturday 28 March 2026 19:31:47 -0400 (0:00:00.022) 0:09:22.157 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml for managed-node12 => (item={'encryption': False, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'fs_create_options': '', 'fs_label': '', 'fs_type': 'xfs', 'mount_options': 'defaults', 'mount_point': '/opt/test1', 'mount_user': None, 'mount_group': None, 'mount_mode': None, 'name': 'test1', 'raid_level': None, 'size': '4g', 'state': 'absent', 'type': 'stratis', 'cached': False, 'cache_devices': [], 'cache_mode': None, 'cache_size': 0, 'compression': None, 'deduplication': None, 'part_type': None, 'raid_disks': [], 'raid_stripe_size': None, 'thin_pool_name': None, 'thin_pool_size': None, 'thin': False, 'vdo_pool_size': None, 'disks': [], 'fs_overwrite_existing': True, 'mount_check': 0, 'mount_passno': 0, 'mount_device_identifier': 'uuid', 'raid_device_count': None, 'raid_spare_count': None, 'raid_chunk_size': None, 'raid_metadata_version': None, '_device': '', '_raw_device': '', '_mount_id': ''}) TASK [Set storage volume test variables] *************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:2 Saturday 28 March 2026 19:31:47 -0400 (0:00:00.035) 0:09:22.192 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_volume_present": false, "_storage_volume_tests": [ "mount", "fstab", "fs", "device", "encryption", "md", "size", "cache" ] }, "changed": false } TASK [Run test verify for storage_test_volume_subset] ************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:19 Saturday 28 March 2026 19:31:47 -0400 (0:00:00.029) 0:09:22.222 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml for managed-node12 => (item=mount) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml for managed-node12 => (item=fstab) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml for managed-node12 => (item=fs) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml for managed-node12 => (item=device) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml for 
managed-node12 => (item=encryption) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml for managed-node12 => (item=md) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml for managed-node12 => (item=size) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml for managed-node12 => (item=cache) TASK [Get expected mount device based on device type] ************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:7 Saturday 28 March 2026 19:31:47 -0400 (0:00:00.191) 0:09:22.413 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_device_path": "" }, "changed": false } TASK [Set some facts] ********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:11 Saturday 28 March 2026 19:31:47 -0400 (0:00:00.087) 0:09:22.501 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_mount_expected_mount_point": "/opt/test1", "storage_test_swap_expected_matches": "0" }, "changed": false } TASK [Get information about the mountpoint directory] ************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:19 Saturday 28 March 2026 19:31:47 -0400 (0:00:00.099) 0:09:22.600 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present | bool", "skip_reason": "Conditional result was False" } TASK [Verify the current mount state by device] ******************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:32 Saturday 28 March 2026 19:31:47 -0400 (0:00:00.043) 0:09:22.644 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Verify mount directory user] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:40 Saturday 28 March 2026 19:31:47 -0400 (0:00:00.044) 0:09:22.688 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Verify mount directory group] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:51 Saturday 28 March 2026 19:31:47 -0400 (0:00:00.041) 0:09:22.730 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Verify mount directory permissions] ************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:62 Saturday 28 March 2026 19:31:47 -0400 (0:00:00.044) 0:09:22.774 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Get path of test volume device] ****************************************** task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:76 Saturday 28 March 2026 19:31:47 -0400 (0:00:00.035) 0:09:22.810 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Gather swap info] ******************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:82 Saturday 28 March 2026 19:31:47 -0400 (0:00:00.036) 0:09:22.846 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Verify swap status] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:88 Saturday 28 March 2026 19:31:47 -0400 (0:00:00.034) 0:09:22.881 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.fs_type == \"swap\"", "skip_reason": "Conditional result was False" } TASK [Unset facts] ************************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-mount.yml:98 Saturday 28 March 2026 19:31:47 -0400 (0:00:00.054) 0:09:22.936 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_found_mount_stat": null, "storage_test_mount_expected_mount_point": null, "storage_test_swap_expected_matches": null, "storage_test_swaps": null, "storage_test_sys_node": null }, "changed": false } TASK [Set some variables for fstab checking] *********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:2 Saturday 28 March 2026 19:31:47 -0400 (0:00:00.067) 0:09:23.003 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": "0", "storage_test_fstab_expected_mount_options_matches": "0", "storage_test_fstab_expected_mount_point_matches": "0", "storage_test_fstab_id_matches": [], "storage_test_fstab_mount_options_matches": [], "storage_test_fstab_mount_point_matches": [] }, "changed": false } TASK [Verify that the device identifier appears in /etc/fstab] ***************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:17 Saturday 28 March 2026 19:31:47 -0400 (0:00:00.084) 0:09:23.088 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present | bool", "skip_reason": "Conditional result was False" } TASK [Verify the fstab mount point] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:24 Saturday 28 March 2026 19:31:48 -0400 (0:00:00.035) 0:09:23.123 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Verify mount_options] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:33 Saturday 28 March 2026 19:31:48 -0400 (0:00:00.065) 0:09:23.189 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "__storage_verify_mount_options 
| d(false)", "skip_reason": "Conditional result was False" } TASK [Verify fingerprint] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:45 Saturday 28 March 2026 19:31:48 -0400 (0:00:00.037) 0:09:23.227 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Clean up variables] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fstab.yml:52 Saturday 28 March 2026 19:31:48 -0400 (0:00:00.042) 0:09:23.269 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_fstab_expected_id_matches": null, "storage_test_fstab_expected_mount_options_matches": null, "storage_test_fstab_expected_mount_point_matches": null, "storage_test_fstab_id_matches": null, "storage_test_fstab_mount_options_matches": null, "storage_test_fstab_mount_point_matches": null }, "changed": false } TASK [Verify fs type] ********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:6 Saturday 28 March 2026 19:31:48 -0400 (0:00:00.038) 0:09:23.307 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type != \"stratis\"", "skip_reason": "Conditional result was False" } TASK [Verify fs label] ********************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-fs.yml:14 Saturday 28 March 2026 19:31:48 -0400 (0:00:00.033) 0:09:23.340 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type != \"stratis\"", "skip_reason": "Conditional result was False" } TASK [See whether the device node is present] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:3 Saturday 28 March 2026 19:31:48 -0400 (0:00:00.031) 0:09:23.372 ******** ok: [managed-node12] => { "changed": false, "stat": { "exists": false } } TASK [Verify the presence/absence of the device node] ************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:9 Saturday 28 March 2026 19:31:48 -0400 (0:00:00.463) 0:09:23.836 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present or storage_test_volume.type == 'disk'", "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the device node - 2] ********************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:16 Saturday 28 March 2026 19:31:48 -0400 (0:00:00.034) 0:09:23.870 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Make sure we got info about this volume] ********************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:23 Saturday 28 March 2026 19:31:48 -0400 (0:00:00.037) 0:09:23.908 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Process volume type (set 
initial value) (1/2)] *************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:29 Saturday 28 March 2026 19:31:48 -0400 (0:00:00.022) 0:09:23.930 ******** ok: [managed-node12] => { "ansible_facts": { "st_volume_type": "stratis" }, "changed": false } TASK [Process volume type (get RAID value) (2/2)] ****************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:33 Saturday 28 March 2026 19:31:48 -0400 (0:00:00.036) 0:09:23.966 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == \"raid\"", "skip_reason": "Conditional result was False" } TASK [Verify the volume's device type] ***************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-device.yml:38 Saturday 28 March 2026 19:31:48 -0400 (0:00:00.037) 0:09:24.004 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Stat the LUKS device, if encrypted] ************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:3 Saturday 28 March 2026 19:31:48 -0400 (0:00:00.033) 0:09:24.037 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Ensure cryptsetup is present] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:10 Saturday 28 March 2026 19:31:48 -0400 (0:00:00.022) 0:09:24.060 ******** ok: [managed-node12] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: cryptsetup TASK [Collect LUKS info for this volume] *************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:16 Saturday 28 March 2026 19:31:49 -0400 (0:00:00.826) 0:09:24.887 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.encryption and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Verify the presence/absence of the LUKS device node] ********************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:22 Saturday 28 March 2026 19:31:49 -0400 (0:00:00.031) 0:09:24.918 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Verify that the raw device is the same as the device if not encrypted] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:29 Saturday 28 March 2026 19:31:49 -0400 (0:00:00.030) 0:09:24.949 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Make sure we got info about the LUKS volume if encrypted] **************** task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:40 Saturday 28 March 2026 19:31:49 -0400 (0:00:00.058) 0:09:25.008 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Verify the LUKS volume's device type if encrypted] *********************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:46 Saturday 28 March 2026 19:31:49 -0400 (0:00:00.078) 0:09:25.086 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present and storage_test_volume.encryption", "skip_reason": "Conditional result was False" } TASK [Check LUKS version] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:51 Saturday 28 March 2026 19:31:50 -0400 (0:00:00.070) 0:09:25.156 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Check LUKS key size] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:64 Saturday 28 March 2026 19:31:50 -0400 (0:00:00.074) 0:09:25.231 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Check LUKS cipher] ******************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:77 Saturday 28 March 2026 19:31:50 -0400 (0:00:00.065) 0:09:25.296 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Set test variables] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:90 Saturday 28 March 2026 19:31:50 -0400 (0:00:00.062) 0:09:25.359 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_crypttab_entries": [], "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Check for /etc/crypttab entry] ******************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:96 Saturday 28 March 2026 19:31:50 -0400 (0:00:00.074) 0:09:25.433 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Validate the format of the crypttab entry] ******************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:103 Saturday 28 March 2026 19:31:50 -0400 (0:00:00.068) 0:09:25.501 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Check backing device of crypttab entry] ********************************** task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:111 Saturday 28 March 2026 19:31:50 -0400 (0:00:00.035) 0:09:25.537 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Check key file of crypttab entry] **************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:119 Saturday 28 March 2026 19:31:50 -0400 (0:00:00.025) 0:09:25.562 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_expected_crypttab_entries | int == 1", "skip_reason": "Conditional result was False" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-encryption.yml:127 Saturday 28 March 2026 19:31:50 -0400 (0:00:00.021) 0:09:25.583 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_expected_crypttab_entries": null, "_storage_test_expected_crypttab_key_file": null }, "changed": false } TASK [Get information about RAID] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:8 Saturday 28 March 2026 19:31:50 -0400 (0:00:00.027) 0:09:25.611 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:14 Saturday 28 March 2026 19:31:50 -0400 (0:00:00.025) 0:09:25.636 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:19 Saturday 28 March 2026 19:31:50 -0400 (0:00:00.020) 0:09:25.657 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:24 Saturday 28 March 2026 19:31:50 -0400 (0:00:00.022) 0:09:25.680 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Set chunk size regex] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:29 Saturday 28 March 2026 19:31:50 -0400 (0:00:00.020) 0:09:25.700 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:37 Saturday 28 March 2026 19:31:50 -0400 (0:00:00.021) 0:09:25.721 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:46 Saturday 28 March 2026 19:31:50 -0400 (0:00:00.021) 0:09:25.743 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:54 Saturday 28 March 2026 19:31:50 -0400 (0:00:00.031) 0:09:25.774 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:62 Saturday 28 March 2026 19:31:50 -0400 (0:00:00.028) 0:09:25.803 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-md.yml:70 Saturday 28 March 2026 19:31:50 -0400 (0:00:00.023) 0:09:25.826 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'raid'", "skip_reason": "Conditional result was False" } TASK [Parse the actual size of the volume] ************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:3 Saturday 28 March 2026 19:31:50 -0400 (0:00:00.021) 0:09:25.847 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present | bool", "skip_reason": "Conditional result was False" } TASK [Parse the requested size of the volume] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:11 Saturday 28 March 2026 19:31:50 -0400 (0:00:00.030) 0:09:25.877 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present | bool", "skip_reason": "Conditional result was False" } TASK [Establish base value for expected size] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:20 Saturday 28 March 2026 19:31:50 -0400 (0:00:00.024) 0:09:25.902 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present | bool", "skip_reason": "Conditional result was False" } TASK [Show expected size] ****************************************************** task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:28 Saturday 28 March 2026 19:31:50 -0400 (0:00:00.038) 0:09:25.940 ******** ok: [managed-node12] => { "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined" } TASK [Get the size of parent/pool device] ************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:32 Saturday 28 March 2026 19:31:50 -0400 (0:00:00.044) 0:09:25.985 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present | bool", "skip_reason": "Conditional result was False" } TASK [Show test pool] ********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:46 Saturday 28 March 2026 19:31:50 -0400 (0:00:00.029) 0:09:26.014 ******** skipping: [managed-node12] => { "false_condition": "_storage_test_volume_present | bool" } TASK [Show test blockinfo] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:50 Saturday 28 March 2026 19:31:50 -0400 (0:00:00.047) 0:09:26.062 ******** skipping: [managed-node12] => { "false_condition": "_storage_test_volume_present | bool" } TASK [Show test pool size] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:54 Saturday 28 March 2026 19:31:50 -0400 (0:00:00.036) 0:09:26.099 ******** skipping: [managed-node12] => { "false_condition": "_storage_test_volume_present | bool" } TASK [Calculate the expected size based on pool size and percentage value] ***** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:58 Saturday 28 March 2026 19:31:51 -0400 (0:00:00.042) 0:09:26.141 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present | bool", "skip_reason": "Conditional result was False" } TASK [Default thin pool reserved space values] ********************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:68 Saturday 28 March 2026 19:31:51 -0400 (0:00:00.053) 0:09:26.195 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Default minimal thin pool reserved space size] *************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:72 Saturday 28 March 2026 19:31:51 -0400 (0:00:00.077) 0:09:26.272 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Default maximal thin pool reserved space size] *************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:77 Saturday 28 March 2026 19:31:51 -0400 (0:00:00.041) 0:09:26.314 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional 
result was False" } TASK [Calculate maximum usable space in thin pool] ***************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:83 Saturday 28 March 2026 19:31:51 -0400 (0:00:00.035) 0:09:26.350 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Apply upper size limit to max usable thin pool space] ******************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:87 Saturday 28 March 2026 19:31:51 -0400 (0:00:00.027) 0:09:26.377 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Apply lower size limit to max usable thin pool space] ******************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:92 Saturday 28 March 2026 19:31:51 -0400 (0:00:00.031) 0:09:26.409 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Convert maximum usable thin pool space from int to Size] ***************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:97 Saturday 28 March 2026 19:31:51 -0400 (0:00:00.034) 0:09:26.443 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Show max thin pool size] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:102 Saturday 28 March 2026 19:31:51 -0400 (0:00:00.023) 0:09:26.467 ******** skipping: [managed-node12] => { "false_condition": "storage_test_volume.thin | bool" } TASK [Show volume thin pool size] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:106 Saturday 28 March 2026 19:31:51 -0400 (0:00:00.024) 0:09:26.492 ******** skipping: [managed-node12] => { "false_condition": "storage_test_volume.thin | bool" } TASK [Show test volume size] *************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:110 Saturday 28 March 2026 19:31:51 -0400 (0:00:00.022) 0:09:26.514 ******** skipping: [managed-node12] => { "false_condition": "storage_test_volume.thin | bool" } TASK [Establish base value for expected thin pool size] ************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:114 Saturday 28 March 2026 19:31:51 -0400 (0:00:00.020) 0:09:26.535 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Calculate the expected size based on pool size and percentage value - 2] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:121 Saturday 28 March 2026 19:31:51 -0400 (0:00:00.024) 0:09:26.560 ******** skipping: 
[managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Establish base value for expected thin pool volume size] ***************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:128 Saturday 28 March 2026 19:31:51 -0400 (0:00:00.026) 0:09:26.586 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Calculate the expected thin pool volume size based on percentage value] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:132 Saturday 28 March 2026 19:31:51 -0400 (0:00:00.035) 0:09:26.622 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Replace expected volume size with calculated value] ********************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:138 Saturday 28 March 2026 19:31:51 -0400 (0:00:00.040) 0:09:26.663 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.thin | bool", "skip_reason": "Conditional result was False" } TASK [Show actual size] ******************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:144 Saturday 28 March 2026 19:31:51 -0400 (0:00:00.044) 0:09:26.708 ******** ok: [managed-node12] => { "storage_test_actual_size": { "changed": false, "false_condition": "_storage_test_volume_present | bool", "skip_reason": "Conditional result was False", "skipped": true } } TASK [Show expected size - 2] ************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:148 Saturday 28 March 2026 19:31:51 -0400 (0:00:00.038) 0:09:26.746 ******** ok: [managed-node12] => { "storage_test_expected_size": "VARIABLE IS NOT DEFINED!: 'storage_test_expected_size' is undefined" } TASK [Assert expected size is actual size] ************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-size.yml:152 Saturday 28 March 2026 19:31:51 -0400 (0:00:00.053) 0:09:26.800 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_storage_test_volume_present | bool", "skip_reason": "Conditional result was False" } TASK [Get information about the LV] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:5 Saturday 28 March 2026 19:31:51 -0400 (0:00:00.044) 0:09:26.844 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Set LV segment type] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:13 Saturday 28 March 2026 19:31:51 -0400 (0:00:00.044) 0:09:26.889 ******** skipping: [managed-node12] => { "changed": false, 
"false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Check segment type] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:17 Saturday 28 March 2026 19:31:51 -0400 (0:00:00.030) 0:09:26.919 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Set LV cache size] ******************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:24 Saturday 28 March 2026 19:31:51 -0400 (0:00:00.029) 0:09:26.949 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Parse the requested cache size] ****************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:31 Saturday 28 March 2026 19:31:51 -0400 (0:00:00.029) 0:09:26.978 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Set expected cache size] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:37 Saturday 28 March 2026 19:31:51 -0400 (0:00:00.029) 0:09:27.008 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Check cache size] ******************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume-cache.yml:42 Saturday 28 March 2026 19:31:51 -0400 (0:00:00.034) 0:09:27.043 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_volume.type == 'lvm' and _storage_test_volume_present", "skip_reason": "Conditional result was False" } TASK [Clean up facts] ********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-volume.yml:25 Saturday 28 March 2026 19:31:51 -0400 (0:00:00.046) 0:09:27.089 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_volume_present": null }, "changed": false } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:43 Saturday 28 March 2026 19:31:52 -0400 (0:00:00.052) 0:09:27.142 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Clean up variable namespace] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:52 Saturday 28 March 2026 19:31:52 -0400 (0:00:00.030) 0:09:27.172 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_blkinfo": null, 
"storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Setup Tang server on localhost for testing] ****************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:234 Saturday 28 March 2026 19:31:52 -0400 (0:00:00.055) 0:09:27.227 ******** included: fedora.linux_system_roles.nbde_server for managed-node12 TASK [fedora.linux_system_roles.nbde_server : Set version specific variables] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main.yml:6 Saturday 28 March 2026 19:31:52 -0400 (0:00:00.090) 0:09:27.318 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml for managed-node12 TASK [fedora.linux_system_roles.nbde_server : Ensure ansible_facts used by role] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml:2 Saturday 28 March 2026 19:31:52 -0400 (0:00:00.072) 0:09:27.391 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "__nbde_server_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.nbde_server : Check if system is ostree] ******* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml:10 Saturday 28 March 2026 19:31:52 -0400 (0:00:00.068) 0:09:27.459 ******** ok: [managed-node12] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.nbde_server : Set flag to indicate system is ostree] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml:15 Saturday 28 March 2026 19:31:52 -0400 (0:00:00.437) 0:09:27.896 ******** ok: [managed-node12] => { "ansible_facts": { "__nbde_server_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.nbde_server : Run systemctl] ******************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml:22 Saturday 28 March 2026 19:31:52 -0400 (0:00:00.084) 0:09:27.981 ******** ok: [managed-node12] => { "changed": false, "cmd": [ "systemctl", "is-system-running" ], "delta": "0:00:00.008730", "end": "2026-03-28 19:31:53.296178", "failed_when_result": false, "rc": 0, "start": "2026-03-28 19:31:53.287448" } STDOUT: running TASK [fedora.linux_system_roles.nbde_server : Require installed systemd] ******* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml:30 Saturday 28 March 2026 19:31:53 -0400 (0:00:00.494) 0:09:28.475 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "\"No such file or directory\" in __is_system_running.msg | d(\"\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.nbde_server : Set flag to indicate that systemd runtime operations are available] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml:35 Saturday 28 March 2026 19:31:53 -0400 (0:00:00.074) 0:09:28.550 ******** ok: [managed-node12] => { "ansible_facts": { "__nbde_server_is_booted": true }, "changed": false } TASK [fedora.linux_system_roles.nbde_server : Set platform/version specific variables] *** task path: 
TASK [Setup Tang server on localhost for testing] ******************************
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:234
Saturday 28 March 2026 19:31:52 -0400 (0:00:00.055) 0:09:27.227 ********
included: fedora.linux_system_roles.nbde_server for managed-node12

TASK [fedora.linux_system_roles.nbde_server : Set version specific variables] ***
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main.yml:6
Saturday 28 March 2026 19:31:52 -0400 (0:00:00.090) 0:09:27.318 ********
included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml for managed-node12

TASK [fedora.linux_system_roles.nbde_server : Ensure ansible_facts used by role] ***
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml:2
Saturday 28 March 2026 19:31:52 -0400 (0:00:00.072) 0:09:27.391 ********
skipping: [managed-node12] => { "changed": false, "false_condition": "__nbde_server_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.nbde_server : Check if system is ostree] *******
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml:10
Saturday 28 March 2026 19:31:52 -0400 (0:00:00.068) 0:09:27.459 ********
ok: [managed-node12] => { "changed": false, "stat": { "exists": false } }

TASK [fedora.linux_system_roles.nbde_server : Set flag to indicate system is ostree] ***
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml:15
Saturday 28 March 2026 19:31:52 -0400 (0:00:00.437) 0:09:27.896 ********
ok: [managed-node12] => { "ansible_facts": { "__nbde_server_is_ostree": false }, "changed": false }

TASK [fedora.linux_system_roles.nbde_server : Run systemctl] *******************
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml:22
Saturday 28 March 2026 19:31:52 -0400 (0:00:00.084) 0:09:27.981 ********
ok: [managed-node12] => { "changed": false, "cmd": [ "systemctl", "is-system-running" ], "delta": "0:00:00.008730", "end": "2026-03-28 19:31:53.296178", "failed_when_result": false, "rc": 0, "start": "2026-03-28 19:31:53.287448" }
STDOUT: running

TASK [fedora.linux_system_roles.nbde_server : Require installed systemd] *******
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml:30
Saturday 28 March 2026 19:31:53 -0400 (0:00:00.494) 0:09:28.475 ********
skipping: [managed-node12] => { "changed": false, "false_condition": "\"No such file or directory\" in __is_system_running.msg | d(\"\")", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.nbde_server : Set flag to indicate that systemd runtime operations are available] ***
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml:35
Saturday 28 March 2026 19:31:53 -0400 (0:00:00.074) 0:09:28.550 ********
ok: [managed-node12] => { "ansible_facts": { "__nbde_server_is_booted": true }, "changed": false }

TASK [fedora.linux_system_roles.nbde_server : Set platform/version specific variables] ***
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml:40
Saturday 28 March 2026 19:31:53 -0400 (0:00:00.089) 0:09:28.639 ********
ok: [managed-node12] => { "ansible_facts": { "__nbde_server_cachedir": "/var/cache/tang", "__nbde_server_group": "tang", "__nbde_server_keydir": "/var/db/tang", "__nbde_server_keygen": "/usr/libexec/tangd-keygen", "__nbde_server_packages": [ "tang" ], "__nbde_server_services": [ "tangd.socket" ], "__nbde_server_update": "/usr/libexec/tangd-update", "__nbde_server_user": "tang" }, "ansible_included_var_files": [ "/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/nbde_server/vars/default.yml" ], "changed": false }

TASK [fedora.linux_system_roles.nbde_server : Include the appropriate provider tasks] ***
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main.yml:9
Saturday 28 March 2026 19:31:53 -0400 (0:00:00.126) 0:09:28.768 ********
included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml for managed-node12

TASK [fedora.linux_system_roles.nbde_server : Ensure tang is installed] ********
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:2
Saturday 28 March 2026 19:31:53 -0400 (0:00:00.126) 0:09:28.894 ********
changed: [managed-node12] => { "changed": true, "rc": 0, "results": [ "Installed: tang-14-13.el10.x86_64", "Installed: llhttp-9.1.3-10.el10.x86_64" ] }
lsrpackages: tang

TASK [fedora.linux_system_roles.nbde_server : Ensure keys are rotated] *********
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:8
Saturday 28 March 2026 19:31:56 -0400 (0:00:02.388) 0:09:31.282 ********
skipping: [managed-node12] => { "changed": false, "false_condition": "nbde_server_rotate_keys | bool", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.nbde_server : Ensure we have keys] *************
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:17
Saturday 28 March 2026 19:31:56 -0400 (0:00:00.067) 0:09:31.349 ********
changed: [managed-node12] => { "arguments": { "cachedir": "/var/cache/tang", "force": false, "keydir": "/var/db/tang", "keygen": "/usr/libexec/tangd-keygen", "keys_to_deploy_dir": null, "state": "keys-created", "update": "/usr/libexec/tangd-update" }, "changed": true, "state": "keys-created" }

TASK [fedora.linux_system_roles.nbde_server : Perform key management (fetch/deploy) tasks] ***
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:26
Saturday 28 March 2026 19:31:57 -0400 (0:00:00.860) 0:09:32.210 ********
skipping: [managed-node12] => { "changed": false, "false_condition": "(nbde_server_fetch_keys | bool) or (nbde_server_deploy_keys | bool)", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.nbde_server : Manage firewall and SELinux for port] ***
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:30
Saturday 28 March 2026 19:31:57 -0400 (0:00:00.043) 0:09:32.253 ********
included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml for managed-node12
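Note: the nbde_server tasks above are the role's Tang provisioning flow: install the tang package, create keys with /usr/libexec/tangd-keygen under /var/db/tang, and hand port handling off to tangd-custom-port.yml. Outside this test, the same setup can be requested with a short play like the sketch below; nbde_server_rotate_keys is the variable whose false default shows up in the skipped "Ensure keys are rotated" task, while nbde_server_port is an assumption checked against the role's documented interface (7500 mirrors the custom port labeled in the next task).

- hosts: managed-node12
  vars:
    nbde_server_port: 7500           # custom tangd.socket port (assumption; the role's default is the standard HTTP port)
    nbde_server_rotate_keys: false   # matches the skipped "Ensure keys are rotated" task above
  roles:
    - fedora.linux_system_roles.nbde_server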
TASK [Ensure tang port is labeled tangd_port_t for SELinux] ********************
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:2
Saturday 28 March 2026 19:31:57 -0400 (0:00:00.078) 0:09:32.332 ********
redirecting (type: modules) ansible.builtin.selinux to ansible.posix.selinux
redirecting (type: modules) ansible.builtin.selinux to ansible.posix.selinux
redirecting (type: modules) ansible.builtin.seboolean to ansible.posix.seboolean
included: fedora.linux_system_roles.selinux for managed-node12

TASK [fedora.linux_system_roles.selinux : Set ansible_facts required by role and install packages] ***
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:2
Saturday 28 March 2026 19:31:57 -0400 (0:00:00.135) 0:09:32.467 ********
included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml for managed-node12

TASK [fedora.linux_system_roles.selinux : Ensure ansible_facts used by role] ***
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml:2
Saturday 28 March 2026 19:31:57 -0400 (0:00:00.044) 0:09:32.512 ********
skipping: [managed-node12] => { "changed": false, "false_condition": "__selinux_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Ensure SELinux packages] *************
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml:7
Saturday 28 March 2026 19:31:57 -0400 (0:00:00.042) 0:09:32.555 ********
included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml for managed-node12

TASK [fedora.linux_system_roles.selinux : Check if system is ostree] ***********
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:5
Saturday 28 March 2026 19:31:57 -0400 (0:00:00.063) 0:09:32.618 ********
ok: [managed-node12] => { "changed": false, "stat": { "exists": false } }

TASK [fedora.linux_system_roles.selinux : Set flag to indicate system is ostree] ***
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:10
Saturday 28 March 2026 19:31:57 -0400 (0:00:00.416) 0:09:33.034 ********
ok: [managed-node12] => { "ansible_facts": { "__selinux_is_ostree": false }, "changed": false }

TASK [fedora.linux_system_roles.selinux : Check if transactional-update exists in /sbin] ***
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:17
Saturday 28 March 2026 19:31:57 -0400 (0:00:00.053) 0:09:33.088 ********
ok: [managed-node12] => { "changed": false, "stat": { "exists": false } }

TASK [fedora.linux_system_roles.selinux : Set flag if transactional-update exists] ***
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:22
Saturday 28 March 2026 19:31:58 -0400 (0:00:00.451) 0:09:33.540 ********
ok: [managed-node12] => { "ansible_facts": { "__selinux_is_transactional": false }, "changed": false }

TASK [fedora.linux_system_roles.selinux : Install SELinux python2 tools] *******
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:26
Saturday 28 March 2026 19:31:58 -0400 (0:00:00.064) 0:09:33.604 ********
skipping: [managed-node12] => { "changed": false, "false_condition": "ansible_facts['python_version'] is version('3', '<')", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Install SELinux python3 tools] *******
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:35
Saturday 28 March 2026 19:31:58 -0400 (0:00:00.071) 0:09:33.675 ********
ok: [managed-node12] => { "changed": false, "rc": 0, "results": [] }
MSG: Nothing to do
lsrpackages: python3-libselinux python3-policycoreutils

TASK [fedora.linux_system_roles.selinux : Install SELinux python3 tools] *******
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:46
Saturday 28 March 2026 19:31:59 -0400 (0:00:00.919) 0:09:34.594 ********
skipping: [managed-node12] => { "changed": false, "false_condition": "ansible_facts['os_family'] == \"Suse\"", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Ensure grubby used to modify selinux kernel parameter] ***
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:58
Saturday 28 March 2026 19:31:59 -0400 (0:00:00.083) 0:09:34.678 ********
ok: [managed-node12] => { "changed": false, "rc": 0, "results": [] }
MSG: Nothing to do
lsrpackages: grubby

TASK [fedora.linux_system_roles.selinux : Install SELinux tool semanage] *******
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:67
Saturday 28 March 2026 19:32:00 -0400 (0:00:00.887) 0:09:35.565 ********
changed: [managed-node12] => { "changed": true, "rc": 0, "results": [ "Installed: policycoreutils-python-utils-3.10-1.el10.noarch" ] }
lsrpackages: policycoreutils-python-utils

TASK [fedora.linux_system_roles.selinux : Notify user that reboot is needed to apply changes] ***
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:81
Saturday 28 March 2026 19:32:02 -0400 (0:00:01.935) 0:09:37.501 ********
skipping: [managed-node12] => { "false_condition": "__selinux_is_transactional | d(false)" }

TASK [fedora.linux_system_roles.selinux : Reboot transactional update systems] ***
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:86
Saturday 28 March 2026 19:32:02 -0400 (0:00:00.047) 0:09:37.548 ********
skipping: [managed-node12] => { "changed": false, "false_condition": "__selinux_is_transactional | d(false)", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Fail if reboot is needed and not set] ***
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:91
Saturday 28 March 2026 19:32:02 -0400 (0:00:00.044) 0:09:37.593 ********
skipping: [managed-node12] => { "changed": false, "false_condition": "__selinux_is_transactional | d(false)", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Refresh facts] ***********************
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:98
Saturday 28 March 2026 19:32:02 -0400 (0:00:00.039) 0:09:37.634 ********
ok: [managed-node12]

TASK [fedora.linux_system_roles.selinux : Run systemctl] ***********************
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:8
Saturday 28 March 2026 19:32:03 -0400 (0:00:01.037) 0:09:38.671 ********
ok: [managed-node12] => { "changed": false, "cmd": [ "systemctl", "is-system-running" ], "delta": "0:00:00.008508", "end": "2026-03-28 19:32:03.971268", "failed_when_result": false, "rc": 0, "start": "2026-03-28 19:32:03.962760" }
STDOUT: running

TASK [fedora.linux_system_roles.selinux : Require installed systemd] ***********
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:15
Saturday 28 March 2026 19:32:04 -0400 (0:00:00.485) 0:09:39.157 ********
skipping: [managed-node12] => { "changed": false, "false_condition": "\"No such file or directory\" in __is_system_running.msg | d(\"\")", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Set flag to indicate that systemd runtime operations are available] ***
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:20
Saturday 28 March 2026 19:32:04 -0400 (0:00:00.074) 0:09:39.232 ********
ok: [managed-node12] => { "ansible_facts": { "__selinux_is_booted": true }, "changed": false }

TASK [fedora.linux_system_roles.selinux : Set permanent SELinux state if enabled] ***
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:29
Saturday 28 March 2026 19:32:04 -0400 (0:00:00.085) 0:09:39.317 ********
skipping: [managed-node12] => { "changed": false, "false_condition": "(not selinux_state is none and selinux_state | length > 0) or (not selinux_policy is none and selinux_policy | length > 0)", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Set permanent SELinux state if disabled] ***
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:40
Saturday 28 March 2026 19:32:04 -0400 (0:00:00.051) 0:09:39.369 ********
skipping: [managed-node12] => { "changed": false, "false_condition": "ansible_facts['selinux']['status'] == \"disabled\"", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Set selinux_reboot_required] *********
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:52
Saturday 28 March 2026 19:32:04 -0400 (0:00:00.041) 0:09:39.411 ********
ok: [managed-node12] => { "ansible_facts": { "selinux_reboot_required": false }, "changed": false }

TASK [Add or remove selinux=0 from args as needed] *****************************
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:56
Saturday 28 March 2026 19:32:04 -0400 (0:00:00.065) 0:09:39.477 ********
skipping: [managed-node12] => { "changed": false, "false_condition": "not __update_kernel_param", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Fail if reboot is required] **********
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:70
Saturday 28 March 2026 19:32:04 -0400 (0:00:00.064) 0:09:39.541 ********
skipping: [managed-node12] => { "changed": false, "false_condition": "selinux_reboot_required", "skip_reason": "Conditional result was False" }
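Note: a few tasks further down, "Set an SELinux label on a port" applies the item {'ports': 7500, 'proto': 'tcp', 'setype': 'tangd_port_t', 'state': 'present', 'local': True} so that tangd can be reached on the non-default port. When driving the selinux role directly, the same change is expressed through its selinux_ports variable; the sketch below reuses only the values reported by that task and is otherwise an assumption about how one would call the role standalone.

- hosts: managed-node12
  vars:
    selinux_ports:
      - ports: 7500              # values mirror the changed item reported below
        proto: tcp
        setype: tangd_port_t
        state: present
        local: true
  roles:
    - fedora.linux_system_roles.selinux

On the managed node the resulting mapping can be confirmed with semanage port -l, filtering for tangd_port_t.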
TASK [fedora.linux_system_roles.selinux : Warn if SELinux is disabled] *********
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:77
Saturday 28 March 2026 19:32:04 -0400 (0:00:00.070) 0:09:39.612 ********
skipping: [managed-node12] => { "false_condition": "ansible_facts['selinux']['status'] == \"disabled\"" }

TASK [fedora.linux_system_roles.selinux : Drop all local modifications] ********
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:82
Saturday 28 March 2026 19:32:04 -0400 (0:00:00.075) 0:09:39.688 ********
skipping: [managed-node12] => { "changed": false, "false_condition": "selinux_all_purge | bool", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Purge all SELinux boolean local modifications] ***
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:90
Saturday 28 March 2026 19:32:04 -0400 (0:00:00.035) 0:09:39.723 ********
skipping: [managed-node12] => { "changed": false, "false_condition": "selinux_booleans_purge | bool", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Purge all SELinux file context local modifications] ***
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:95
Saturday 28 March 2026 19:32:04 -0400 (0:00:00.037) 0:09:39.761 ********
skipping: [managed-node12] => { "changed": false, "false_condition": "selinux_fcontexts_purge | bool", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Purge all SELinux port local modifications] ***
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:100
Saturday 28 March 2026 19:32:04 -0400 (0:00:00.035) 0:09:39.796 ********
skipping: [managed-node12] => { "changed": false, "false_condition": "selinux_ports_purge | bool", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Purge all SELinux login local modifications] ***
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:105
Saturday 28 March 2026 19:32:04 -0400 (0:00:00.033) 0:09:39.830 ********
skipping: [managed-node12] => { "changed": false, "false_condition": "selinux_logins_purge | bool", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Set SELinux booleans] ****************
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:110
Saturday 28 March 2026 19:32:04 -0400 (0:00:00.050) 0:09:39.880 ********
skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" }

TASK [fedora.linux_system_roles.selinux : Set SELinux file contexts] ***********
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:121
Saturday 28 March 2026 19:32:04 -0400 (0:00:00.027) 0:09:39.908 ********
skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" }

TASK [fedora.linux_system_roles.selinux : Set an SELinux label on a port] ******
task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:134
Saturday 28 March 2026 19:32:04 -0400 (0:00:00.110) 0:09:40.019 ********
changed: [managed-node12] => (item={'ports': 7500, 'proto': 'tcp', 'setype': 'tangd_port_t', 'state': 'present', 'local': True}) => { "__selinux_item": { "local":
true, "ports": 7500, "proto": "tcp", "setype": "tangd_port_t", "state": "present" }, "ansible_loop_var": "__selinux_item", "changed": true, "ports": [ "7500" ], "proto": "tcp", "setype": "tangd_port_t", "state": "present" } TASK [fedora.linux_system_roles.selinux : Set linux user to SELinux user mapping] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:146 Saturday 28 March 2026 19:32:07 -0400 (0:00:02.151) 0:09:42.170 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Get SELinux modules facts] *********** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:159 Saturday 28 March 2026 19:32:07 -0400 (0:00:00.017) 0:09:42.188 ******** ok: [managed-node12] => { "ansible_facts": { "selinux_checksums": true, "selinux_installed_modules": { "abrt": { "100": { "checksum": "sha256:7bd953bc370c70fe9299b766f8a40a1659e03f7ef4dd6c722c3e182bc90c1c68", "enabled": 1 } }, "accountsd": { "100": { "checksum": "sha256:e8caedff457d24c0562673868860f813a6cf223422bc48524e7cf1e8df7ddeb6", "enabled": 1 } }, "acct": { "100": { "checksum": "sha256:1150e95aa33304027895200fbac6de5d0ec1ada237d1cf255f979bcf712831ba", "enabled": 1 } }, "afs": { "100": { "checksum": "sha256:634c80be00ac898add54ea6d59ead5a6e92e4d06a230b9b4485059070b0a3bde", "enabled": 1 } }, "afterburn": { "100": { "checksum": "sha256:90f08987cd8645d1bc99245841a9f2d0c9858196064df233655623d1b5cfbdde", "enabled": 1 } }, "aide": { "100": { "checksum": "sha256:c59e1e8e511ef99a0e5715ed9dd2c15ea0b522186e683ed8bf715029c4ef325c", "enabled": 1 } }, "alsa": { "100": { "checksum": "sha256:ee1199b88bcd39ff6de202bdef25f1dc7292828d80856fa535fb80454dad000e", "enabled": 1 } }, "amanda": { "100": { "checksum": "sha256:3b9f22d94579c8dd60f827159f6f15a2085d9bb799cbc88d7c1d23ce7a63aab4", "enabled": 1 } }, "anaconda": { "100": { "checksum": "sha256:449d303fa3e44bb7afa7b0a715e9566e1e33fd3368aee1b078529f0225cf56ff", "enabled": 1 } }, "apache": { "100": { "checksum": "sha256:bfefb6205876b2f58e84c1952c749c146f4e2b8107a660e084614b23d60300c8", "enabled": 1 } }, "apm": { "100": { "checksum": "sha256:3a903d39c2d9de406f33790f234fde1f1d0b20bacae36fa0c6bfb5fee9f800c5", "enabled": 1 } }, "application": { "100": { "checksum": "sha256:35030bf2d1dc7ec055a954de113ff7918709262d5c318040b0cbd07018e9ee88", "enabled": 1 } }, "auditadm": { "100": { "checksum": "sha256:5da016180d7da3fa18541f72cc69eb5c9ffebc2851ec3e6150bfd5a73153f860", "enabled": 1 } }, "authlogin": { "100": { "checksum": "sha256:6432b280ab64da2e35f7df339167f29bc9b9dca4c01e8e8a0c409b7a0adbd5d1", "enabled": 1 } }, "automount": { "100": { "checksum": "sha256:856e89b68ecf997f8a33e98c7e4bd2250a43f88790efba170f787434139a8c0b", "enabled": 1 } }, "avahi": { "100": { "checksum": "sha256:78ff1f7154a00c128cbf5c237452baf7ed1cd46cb11378439b64432d1db58d4f", "enabled": 1 } }, "bacula": { "100": { "checksum": "sha256:1e517a22f8a71ea3ef177798685dfb6359b1006205fdc97a0972ff1cf7125f40", "enabled": 1 } }, "base": { "100": { "checksum": "sha256:454cc3d74ae64acf78ad17344d47579841f6b44266c6c3d56f58594918d2e3cc", "enabled": 1 } }, "bind": { "100": { "checksum": "sha256:4d13ddead5cb94be9c944061044e0bd56974a9db9df64f7259593b57d51386d5", "enabled": 1 } }, "blkmapd": { "100": { "checksum": "sha256:00bebe07cf015d4084870d1f0866913ae687801ca2d26e12c00df6823b3bc304", "enabled": 1 } }, "blueman": { "100": { "checksum": 
"sha256:0cb5bf9ff94cee18667b41dc4d1b988ace9baa06ca99507a91ff3190f4e39d35", "enabled": 1 } }, "bluetooth": { "100": { "checksum": "sha256:233825c029885cb6196920f19b27336b444411b9a15b956c95a2a07b89e9b041", "enabled": 1 } }, "boltd": { "100": { "checksum": "sha256:afaeabb15d1d5e4f3d07865c5213f4a78ae5865d0f782e95d1c599e61b7ed7d3", "enabled": 1 } }, "boothd": { "100": { "checksum": "sha256:2c8ef6be5667ad71b144c8bd4ec606b56cecd4e3ea1d242cbc657c1c993d99af", "enabled": 1 } }, "bootloader": { "100": { "checksum": "sha256:dd35cbec0b5e8f81e3394a60905606fb9d986fd394ad60ccedfcdb60f0137b0b", "enabled": 1 } }, "bootupd": { "100": { "checksum": "sha256:e89032180210c66a288c43d2de3a47b285d38fa239226bd49ae19a1a0488f41c", "enabled": 1 } }, "brltty": { "100": { "checksum": "sha256:96474cc59c799aa0e25123ea9909b4fb319a03f1b5f6cbbf1ae3dcda374815a7", "enabled": 1 } }, "bugzilla": { "100": { "checksum": "sha256:7c8fa6c136fc6624a1dd4345c3484ffbc07c9a4be8b7543d78f0615680cb73cc", "enabled": 1 } }, "cachefilesd": { "100": { "checksum": "sha256:1b066f5d029b5584d34d95007991d218446244f994f3ff802339cd5890e48091", "enabled": 1 } }, "calamaris": { "100": { "checksum": "sha256:60ca58fba194f53faf1c0bc41f8eeeba9ca3de6f2da08f8940b6d1d3093e7c0f", "enabled": 1 } }, "callweaver": { "100": { "checksum": "sha256:815d2bba5c316d5d0334add30dca473daf3fdc85e48785c26c7b47b2ef833823", "enabled": 1 } }, "canna": { "100": { "checksum": "sha256:4ec687f59310bcb03685bec14fec451d393508d1ca5f926209ba967d42673d90", "enabled": 1 } }, "ccs": { "100": { "checksum": "sha256:b6821587c3b2df8dc3ce8de9851cb1be120dfd68e5729141e7a293917029e978", "enabled": 1 } }, "cdrecord": { "100": { "checksum": "sha256:df9850293d6833d206bfb3a875bdf69d0823daf24993b30f962da683032555e2", "enabled": 1 } }, "certmaster": { "100": { "checksum": "sha256:de4651616a6c8dea0dd4b018d3ab32c1506ba75188d1bcab2e04af461eea6040", "enabled": 1 } }, "certmonger": { "100": { "checksum": "sha256:91ab7c5c9df2a80b515c52b105f54e9247b092be7864be939d880b2f94cec862", "enabled": 1 } }, "certwatch": { "100": { "checksum": "sha256:bec8a93b694c60226db8744867c6f87775440937699ac0d023e06e7b7aee1d6b", "enabled": 1 } }, "cfengine": { "100": { "checksum": "sha256:3f5f3b049123ab0a61d1f7a7e6372bd7d2194feb212f2b5bd85a9148f21f7db6", "enabled": 1 } }, "cgroup": { "100": { "checksum": "sha256:0ae822bb67f347f0a88f4ec8584f394e3e10fc11363dcf34b1d583305e76c9e6", "enabled": 1 } }, "chrome": { "100": { "checksum": "sha256:d20dacb3b990c66c37bbf1bbd081a84a0e35f3cdf1501c27a5ec881c3d187d84", "enabled": 1 } }, "chronyd": { "100": { "checksum": "sha256:090e59b1324bf559d79a1ef363fe9bc1bd2adb928f6a95bb1628c92f93063415", "enabled": 1 } }, "cifsutils": { "100": { "checksum": "sha256:80b987a686635b3e05bedf481ef892af7231100a61fbf6ca5e93da17dbb887c3", "enabled": 1 } }, "cinder": { "100": { "checksum": "sha256:9fa130934871404f743c4803af509afa78e56b3ba2f83bd108564858f163329f", "enabled": 1 } }, "cipe": { "100": { "checksum": "sha256:a68798c10fa97ddee5f54ac1d1281ecce65750e4e151076f4ad826187fc647a2", "enabled": 1 } }, "clock": { "100": { "checksum": "sha256:4e04381e36d9df4d9f19ad718b1ddf4686f633f72b24d1161055b1f7280a81d4", "enabled": 1 } }, "clogd": { "100": { "checksum": "sha256:33c562fd35e8b9fc5fdf807c488d1ac4adfa6c3b92dbbf87034a6732478e1bf7", "enabled": 1 } }, "cloudform": { "100": { "checksum": "sha256:8279ce237a5b4ffe5a80db09e71f06bdc8a4838910274ffc4e240ec99c185df5", "enabled": 1 } }, "cmirrord": { "100": { "checksum": "sha256:f89476b4ce6acf51cb0628609027a6c44a90db4ccde4da07505b5332a00b7c63", "enabled": 1 } }, "colord": { "100": { 
"checksum": "sha256:8a072efaf9d6f3af5ec04477f28ec73585274598b69d2e8f24c8180dcfacb15c", "enabled": 1 } }, "comsat": { "100": { "checksum": "sha256:d5d67d239ca7cd2acbd4c5e15fbbc0f97810139fd352e9966c1e63a7d6ab5188", "enabled": 1 } }, "condor": { "100": { "checksum": "sha256:a4da29d700315627bf480c63220b2b639ec0b87435f9ecca111eed86c1e019cd", "enabled": 1 } }, "conntrackd": { "100": { "checksum": "sha256:36bd297ee2c16ed1564895422c05f51d957f09ef17120ac2efc93dc46d2d81a0", "enabled": 1 } }, "consolekit": { "100": { "checksum": "sha256:053f0dac3d8bc41d9dcbaf9b3f1c2e55ec313e07465db7462fdacf8fd89ce553", "enabled": 1 } }, "coreos_installer": { "100": { "checksum": "sha256:9fb7d00873d78a196b1fb639f107a92cf007803c7eaa2658eba7ed05081acb99", "enabled": 1 } }, "couchdb": { "100": { "checksum": "sha256:59f3c694a3ba5e60ece2b1ddeb5f5bd4f00fdaa67a5c7aa3a8fe7bd302963523", "enabled": 1 } }, "courier": { "100": { "checksum": "sha256:c05ca77b6a73640331abcf4018a9b7f2f3733f9e128bd96d7131ab7ba1fa823c", "enabled": 1 } }, "cpucontrol": { "100": { "checksum": "sha256:0e54e45a5adaa7cc24e6a273e25693919e92f498e42b8e136b7d7bf29be2d6af", "enabled": 1 } }, "cpuplug": { "100": { "checksum": "sha256:629423401aaf5d0f529905a421a461d2f1d7ddbdb94020a140831f8873724c39", "enabled": 1 } }, "cron": { "100": { "checksum": "sha256:7ec2279bb83c931e6f379f45255a0727d207838ab55930f7595e0ab1e95b8db3", "enabled": 1 } }, "ctdb": { "100": { "checksum": "sha256:601b41f04bdd9789e01a1158241a17c7c4f937c88adbc75e9bf8875ee7cb0756", "enabled": 1 } }, "cups": { "100": { "checksum": "sha256:9f9cfd140d7b13b9679ba8b8d7a59366294db02d816d60af2e00a3fff1f6fed9", "enabled": 1 } }, "cyphesis": { "100": { "checksum": "sha256:5d64fbf2f59d2c8ce842a9e8adf39877e41bb1d3e77c374681044aafbd662d7d", "enabled": 1 } }, "cyrus": { "100": { "checksum": "sha256:1ce15bea5149f786d9b714426a2870c43d01107f2e3a6bd4b5b324a166508dbf", "enabled": 1 } }, "daemontools": { "100": { "checksum": "sha256:cd287fe5971d71a4512ad52ad855f427c8b722cf7aec6e884ca646ca3da0df2b", "enabled": 1 } }, "dbadm": { "100": { "checksum": "sha256:f6643411d4b5fbc33bd87d4b3b1d4ea1b5d3659a2092cdee9ecbd4dd700af416", "enabled": 1 } }, "dbskk": { "100": { "checksum": "sha256:41bc4ffe76c9e5c220822efd68a2e55b1126b38f646b7c4016a36263a89e482d", "enabled": 1 } }, "dbus": { "100": { "checksum": "sha256:fb9a0c7ec7a8627b89649e44dd9e2d6e4cf70166b2a55f6509f898695510376b", "enabled": 1 } }, "dcc": { "100": { "checksum": "sha256:8b52f0bebd92342ee6b7e00dfe3e20d3a0f041badd4312b9b22b3d3ab0d1b3b1", "enabled": 1 } }, "denyhosts": { "100": { "checksum": "sha256:22ed092464b3757fcc58749af15cc33319f406db1747f4b28f74feb123969612", "enabled": 1 } }, "devicekit": { "100": { "checksum": "sha256:7633e1cf2075f6323862d89b5e0072681e64e41895b6caabbc8c6b18223dce9c", "enabled": 1 } }, "dhcp": { "100": { "checksum": "sha256:1da30094d8664d16dee43b934829c800003e49304f1540e5b41f9fb12a2df4df", "enabled": 1 } }, "dictd": { "100": { "checksum": "sha256:6cdf81585aeb903ef5da64551f6bde953aeb48f8623a8d416485847541b7b283", "enabled": 1 } }, "dirsrv": { "100": { "checksum": "sha256:1af7de0f7c691873148f17453849b3dee97e78a1e8108755c1c133c05f29b651", "enabled": 1 } }, "distcc": { "100": { "checksum": "sha256:bd9199873915ce6fadfc570fba837765971726dac64a74e1ba74c55dc0b24067", "enabled": 1 } }, "dmesg": { "100": { "checksum": "sha256:1205bd72660c46019cfb8c3a899accaefb280f5f6bda63850ee2b508cc4542d6", "enabled": 1 } }, "dmidecode": { "100": { "checksum": "sha256:b799553c2c0ab0abd040196142394a15d429e15b573df56edd0e150295d6993c", "enabled": 1 } }, "dnsmasq": { "100": { 
"checksum": "sha256:bdaf9c5be3de423b3d1b72c8bf38e2315fd58ce10ca6a58873c7d3e3a9c8aed2", "enabled": 1 } }, "dovecot": { "100": { "checksum": "sha256:1de79cf621df4cb04b8ee1201f38c91d8a23cfd85928894d4f9a8d3a27dd99e1", "enabled": 1 } }, "dspam": { "100": { "checksum": "sha256:5d8847ac4f68cf59bdc174bc1ce3688f86efbdd4a4563f701cdc74b2fa01504c", "enabled": 1 } }, "fcoe": { "100": { "checksum": "sha256:58fbe8fa7832fec940b7afc7ffe8e4357ddb5a03a662687b928f84029d81c781", "enabled": 1 } }, "fdo": { "100": { "checksum": "sha256:c821191e37683fab6a25fa714edaa75bcd7a81760fa8b547c31e40967875a29c", "enabled": 1 } }, "fedoratp": { "100": { "checksum": "sha256:09288902a734ceef738fc904463b50798ce700c15059c70d092412b12ead156d", "enabled": 1 } }, "fetchmail": { "100": { "checksum": "sha256:9fbdec8e421e1fa27dfea13b163cd0810d404845ee724b6f1b3ca5e6500a42c0", "enabled": 1 } }, "finger": { "100": { "checksum": "sha256:9144a6012aa7771292a276576f811b7948abf4b7fe2e07f05c66d232d5811055", "enabled": 1 } }, "firewalld": { "100": { "checksum": "sha256:ae1f3ce0ff3a003f1db93dbbe09084b0ba32675b332f9930f23f9f5e66f57204", "enabled": 1 } }, "firewallgui": { "100": { "checksum": "sha256:60856e056bdd9de8ffce0f5468846b00616fad40f87d38d5fa73acb74475d83b", "enabled": 1 } }, "firstboot": { "100": { "checksum": "sha256:8d10737fea4fe0dd3ae3725002a8f0c5889a3645ba4894e9dccec01a3e51b3d9", "enabled": 1 } }, "fprintd": { "100": { "checksum": "sha256:260a661a05f5958d32eecc692d9d5350d51ec0ef9e9bf29aad653d8637ceba29", "enabled": 1 } }, "freeipmi": { "100": { "checksum": "sha256:e206bfbfcbe748672784fe52a91a1220965bcae5ff57dab458ade953f0b17b80", "enabled": 1 } }, "freqset": { "100": { "checksum": "sha256:8826b12f85b02168080b03dec5eef5c91283ba1ebf8370022a71170064a97dcc", "enabled": 1 } }, "fstools": { "100": { "checksum": "sha256:00b8b8e23b9e36087646cffa7c5126b0a402ac38a958930d27fd058f78f67987", "enabled": 1 } }, "ftp": { "100": { "checksum": "sha256:181e899c092e42a648f7474f936d3413769842e4a0192dbc91cf587cd1547ffc", "enabled": 1 } }, "fwupd": { "100": { "checksum": "sha256:54578edd17537e1639df33aa54a731059844519c32cb8dee24e31b29f499dc67", "enabled": 1 } }, "games": { "100": { "checksum": "sha256:325a80a2f12fed84077e57ac8725cdbd3449114115ac74904280c05c4d9f1597", "enabled": 1 } }, "geoclue": { "100": { "checksum": "sha256:9ac486b2d71758e95a106894de9c4f5b21506e07caba5d3753964556cb042fab", "enabled": 1 } }, "getty": { "100": { "checksum": "sha256:0a0e0d24bb9866726e90384d92166829d3c43e6086613b425735544745295adf", "enabled": 1 } }, "git": { "100": { "checksum": "sha256:cc208709ab1c0862004f9576e53a62665826c6cdb5f443eb463d8743cc399769", "enabled": 1 } }, "gitosis": { "100": { "checksum": "sha256:9505b4010a4aafa33b27c1a73f02f7fb2ff720e95ef943b40db387b893b7499a", "enabled": 1 } }, "glance": { "100": { "checksum": "sha256:a1966f6618bc0d636a87d83d852abba0b92bcb8aaafe82837b39958954490ad5", "enabled": 1 } }, "glusterd": { "100": { "checksum": "sha256:80108836908472e7859b47ff8ba90d2c629f02666a3246c2dc7e6039ee1dc099", "enabled": 1 } }, "gnome": { "100": { "checksum": "sha256:42e7cda751258014b8bf2492522d20dcc0a1c96027d8261b7996289ad136ee7d", "enabled": 1 } }, "gnome_remote_desktop": { "100": { "checksum": "sha256:840c649229032dfd9b5880f50fcd371e5cc4c87fba7d424f03f3f5f28cb1f686", "enabled": 1 } }, "gpg": { "100": { "checksum": "sha256:ce63d6d0ffc035614b61d82eae48a44485151cb6e93a0617c782116187ab1ad3", "enabled": 1 } }, "gpm": { "100": { "checksum": "sha256:3b3f4538fdffe23885b90ece09b6859afc8a0b7f3314b9b4a60bcb9525776725", "enabled": 1 } }, "gpsd": { "100": { 
"checksum": "sha256:8184e98e265b9082358f87a8a715bf235f96c31008e60541b742525e7f09bce2", "enabled": 1 } }, "gssproxy": { "100": { "checksum": "sha256:a57b0a11f54bad916a170bf890b15978ad925ccc5e976d9d7b94b6c66f7c2e83", "enabled": 1 } }, "guest": { "100": { "checksum": "sha256:fc4a2c076ee26500d58559dfd29fe267a6f1ec33515064c8daa16448b7aaca9a", "enabled": 1 } }, "hostapd": { "100": { "checksum": "sha256:b13286a614402a3538fc0387f3d7abc30085c382a33e83faed9be57f33b63f45", "enabled": 1 } }, "hostname": { "100": { "checksum": "sha256:37d95ab4a25b542db931edf26632d35e3a969239ff1de338b037e2e5ec506fad", "enabled": 1 } }, "hsqldb": { "100": { "checksum": "sha256:1eab1ed96a9f87898b99be5005c598d35dc079b1ab5a7214ceb6e3e5c50f8810", "enabled": 1 } }, "hwloc": { "100": { "checksum": "sha256:6719dc568ff70220e53b2f1ed86d9a395a2f038d99901396022e4dc63d4ae868", "enabled": 1 } }, "hypervkvp": { "100": { "checksum": "sha256:c280b017518cea08d176260a60012fd4d62882dcdf6bc9fc2005c74573b2240c", "enabled": 1 } }, "ibacm": { "100": { "checksum": "sha256:a6e5ded6ba1592d16d507e4f87b6078156d99e9554184a9912a3a91819ebb5df", "enabled": 1 } }, "ica": { "100": { "checksum": "sha256:a90844f8b8a25de5abadb4887f1b1ac84367f5ae248d9213a90a39859b3e5df3", "enabled": 1 } }, "icecast": { "100": { "checksum": "sha256:40b455ce92e388b7f1eb0c65645000ae54076221c2acce0fa34c6f8d29d6ee67", "enabled": 1 } }, "iiosensorproxy": { "100": { "checksum": "sha256:392808628481e796663a1b99d1340efca31995d4832ec45fe71a939f12c117e7", "enabled": 1 } }, "inetd": { "100": { "checksum": "sha256:59557d1383fbb0a9586e18a4b129912d3ff989dbb853ed29bd0e27dfc160351d", "enabled": 1 } }, "init": { "100": { "checksum": "sha256:c850d134886113631f28665513a0536ca98fce16e53a9b3f146d1449ae9e0ee5", "enabled": 1 } }, "inn": { "100": { "checksum": "sha256:208231fcd39727d36f759dca410d8675e5852b7330f966aa86dc6e37c9abb22b", "enabled": 1 } }, "insights_client": { "100": { "checksum": "sha256:593cf420e0ac5523489f53d4b0cf2af0eaf8821d841f947349963159834a764a", "enabled": 1 } }, "iodine": { "100": { "checksum": "sha256:630a305bf2ae45b8211c97cd029f1ae4247e0a00f936d8595e3cff59570cbd5f", "enabled": 1 } }, "iotop": { "100": { "checksum": "sha256:104ca47441ca07c42c5e4770c1eae2178d2cdb880a174581032c7f846a05fb6e", "enabled": 1 } }, "ipmievd": { "100": { "checksum": "sha256:b0baf75f1edb1c27f1caf49a30874604f82791ee1b1c85c38a06195f8d806b0e", "enabled": 1 } }, "ipsec": { "100": { "checksum": "sha256:ba9aeb152542b5bd253d5a6e3b6aeff3e857615f4f42836c19098d45263fb120", "enabled": 1 } }, "iptables": { "100": { "checksum": "sha256:177e6ff2bd9b8e6800b6138497d26b5cdd005046f6c62f672ecc66701b1251c9", "enabled": 1 } }, "irc": { "100": { "checksum": "sha256:32c9122d027bf6229b8cf18a4d45fc63e38c5b0a3656312854833e4342e0e608", "enabled": 1 } }, "irqbalance": { "100": { "checksum": "sha256:42c6066d4a0751cb1db4526c055b0527a4d9403b45794571ea0dc4c71a666bec", "enabled": 1 } }, "iscsi": { "100": { "checksum": "sha256:997985873de7774ecab07db71db7974723494b65a569e2f852977c25d381359c", "enabled": 1 } }, "isns": { "100": { "checksum": "sha256:80496dfdf52576d83029c83097446766868b289a06aab9e9df110b733594a98e", "enabled": 1 } }, "jabber": { "100": { "checksum": "sha256:c739061ae87ecfdebea9afd0b8021aa3ea154e8e1ef00ba148c82d225ee0c8d2", "enabled": 1 } }, "jetty": { "100": { "checksum": "sha256:81d97ceabbc97f1b524d3e0e60904f5225fcc44996a83d9db67b7ef3d8b18075", "enabled": 1 } }, "jockey": { "100": { "checksum": "sha256:8eecfbe8b3b75068c3c26b6fee1cd79009098d65b962b8a847438e8c31e9d053", "enabled": 1 } }, "journalctl": { "100": { 
"checksum": "sha256:2ae3ef5124e180523c5f610cbd536ad55c7e0b8e7c551201c29827e59c7c1594", "enabled": 1 } }, "kafs": { "100": { "checksum": "sha256:34f943a522e251615c58df783c4ace2086a1752a3b69e5cbfef2ec5d42234da5", "enabled": 1 } }, "kdump": { "100": { "checksum": "sha256:a0a2baa7b6c1d5ed5e5582f7ffc7d5a8cf2d4e7d034f50b1f3d0972fc9674939", "enabled": 1 } }, "kdumpgui": { "100": { "checksum": "sha256:78f45331782c43239be7330f5b928d9dace6b3ebbfda5e07c1374c462fe06923", "enabled": 1 } }, "keepalived": { "100": { "checksum": "sha256:41297d28af002c4e97c864d3b5ee64f49519b4db72a71b5bf7cd104c2b05af0a", "enabled": 1 } }, "kerberos": { "100": { "checksum": "sha256:2d6c154dc940a2c178931902f7e0c0a1e9f9956055f92fc1bc92b1f2143a674d", "enabled": 1 } }, "keyboardd": { "100": { "checksum": "sha256:33d8e3fbc9f8f48ff7a69685721a782c9f8b62bbbd1878e9bafefad5bdcf51db", "enabled": 1 } }, "keystone": { "100": { "checksum": "sha256:653fca3667c90bf30da196ab61d79ee5afe1ae9703324b2512180986eec8d6c2", "enabled": 1 } }, "keyutils": { "100": { "checksum": "sha256:949cb7c7b62d17c998f63d9970d6fefbf5b3d56d65f729bf21a4f6703135e3f4", "enabled": 1 } }, "kismet": { "100": { "checksum": "sha256:c1e22e4778b465a08d815aaf53d71ba28122b061bef976f522a2304366849a2d", "enabled": 1 } }, "kpatch": { "100": { "checksum": "sha256:a308db644962bd0893fe1b8bc6571460b377f728ac28632852ca3b9c281ed74e", "enabled": 1 } }, "ksmtuned": { "100": { "checksum": "sha256:9925a9acfb6375d93a08546a581a90375ee8582972cfc9d6884204d538b895e6", "enabled": 1 } }, "ktalk": { "100": { "checksum": "sha256:0c9136b18fb83249b1dd825fd497435d852adfaddc9d618ac4d269843a458317", "enabled": 1 } }, "ktls": { "100": { "checksum": "sha256:f15a20f050208e43060eafa61f63a8e722792b76724c7f2fc44c856879ac70ae", "enabled": 1 } }, "ldap": { "100": { "checksum": "sha256:f2322f689c55de691d98651af5bfece0b87608950ccd1a92e9225cfe47415851", "enabled": 1 } }, "libraries": { "100": { "checksum": "sha256:454587674794c66f8b25f9e90154c291e81f6ab93d7c8fb3107068cfcefb797d", "enabled": 1 } }, "likewise": { "100": { "checksum": "sha256:4d05909abe38f75a72561bb28fb279f4771d6886406de5d4665111db56181972", "enabled": 1 } }, "lldpad": { "100": { "checksum": "sha256:dbd4d9d61f7e57925f7a61e0a42d65273d8be168f6e3c77b5467d7b9a93817ff", "enabled": 1 } }, "loadkeys": { "100": { "checksum": "sha256:3121357ab50a02cfc634a5fe4250aff89a1418865918569b77a10cd333cc0018", "enabled": 1 } }, "locallogin": { "100": { "checksum": "sha256:3390d25acd3ece1c7404db8c3db0f5c80278d5063fab9c8f4a8bb5584b5ded16", "enabled": 1 } }, "lockdev": { "100": { "checksum": "sha256:bc457c7839567f5943e06ec31f915742988f5e602c918a3a0d46bde5b94b6c78", "enabled": 1 } }, "logadm": { "100": { "checksum": "sha256:d369ef834c0087ca09871e4dff0128cfc8e39a97e1e3b5bd3001fd752b7af5cb", "enabled": 1 } }, "logging": { "100": { "checksum": "sha256:c739c49825488aa1ae74fd218a5718aa3c859cd1205a1ea581710fe539bfbde6", "enabled": 1 } }, "logrotate": { "100": { "checksum": "sha256:6a59e4d4df92e3d73d66b34035aaf00f5ca0306da24bd478c72a39c7e7844960", "enabled": 1 } }, "logwatch": { "100": { "checksum": "sha256:4196d8e4db83bd37b4e883383dfe8543fb33029b42c557fe5af7e8475b558584", "enabled": 1 } }, "lpd": { "100": { "checksum": "sha256:5427ae01212227c3a719cd1e5664c1290175bd574d7927903102147fa51989c0", "enabled": 1 } }, "lsm": { "100": { "checksum": "sha256:7d1a24bbfe8deb3a3d7aaa92bfc9c922baba1476561b92f828aae226fe9dc3c4", "enabled": 1 } }, "lvm": { "100": { "checksum": "sha256:b772895524eef04c9c79093c837e6033beff39717343d76528a8a85e4a466bb6", "enabled": 1 } }, "mailscanner": { "100": { 
"checksum": "sha256:5017fd004213b4ceaf374bebf74e35a0084faaf6cede37b78769036a05e34b9e", "enabled": 1 } }, "mandb": { "100": { "checksum": "sha256:7c71eef6360c66869a42a19a34ee30abc1064de8fbbcec0098d2ee57fbedb79a", "enabled": 1 } }, "mcelog": { "100": { "checksum": "sha256:cf5a647f3682f454b850317643416460ce6a7710f3f5fec6b0deac40e3c72e07", "enabled": 1 } }, "mediawiki": { "100": { "checksum": "sha256:067389c903715a12a93937a436e3df918c42a4871765668bea50eca4f02212ba", "enabled": 1 } }, "memcached": { "100": { "checksum": "sha256:6cffe11f14b5c03ba0969f0a3f476455cfac505f2cc1f2d467222a21a3ed7c5c", "enabled": 1 } }, "minissdpd": { "100": { "checksum": "sha256:1ea9c32ae0a7becd1e1879dd4c4b367d450b2721dd8fc3f771081d1568b450f5", "enabled": 1 } }, "miscfiles": { "100": { "checksum": "sha256:ea5057da646444d5450ff16e5dcb82ab338e8fd5fcf5f8dd72e782ef18ad1031", "enabled": 1 } }, "modemmanager": { "100": { "checksum": "sha256:8de073e5cf69c58d03162e50f5fe7537ac8f90c81f02d2906cb10a910a414ec7", "enabled": 1 } }, "modutils": { "100": { "checksum": "sha256:7d0336a428c29ae9a91c18857f594a16f74f5a963607fff966e7de78102ff76b", "enabled": 1 } }, "mojomojo": { "100": { "checksum": "sha256:0464738bfa038fc9ba7ce06c15abf3ff5c2113083e236dd8b96b5d85b1fb51b7", "enabled": 1 } }, "mon_statd": { "100": { "checksum": "sha256:9489c6c732b353e34ed3e5624fe8b73c336f4786c47bc30827b4a5a59b7dca44", "enabled": 1 } }, "motion": { "100": { "checksum": "sha256:660ecac63132d47b51afaeea6f55f74e3a6f25141a4d0d28065e094d7cdc6c75", "enabled": 1 } }, "mount": { "100": { "checksum": "sha256:b0a2d9c52715e340983df89e8adb304ff3790b2564659fd821843a3f172d46d0", "enabled": 1 } }, "mozilla": { "100": { "checksum": "sha256:04b77283c6d821ca98ecb58ef7bd17f6f185168786887a67f4c71cceeaa0476c", "enabled": 1 } }, "mpd": { "100": { "checksum": "sha256:ff9433431cb560a4ff03dc02129289a0f78d1909fe1f3954347f18e318c3cdc4", "enabled": 1 } }, "mptcpd": { "100": { "checksum": "sha256:dc069f3a6c78dc367c39cd7e50fe17948cf9877f3e306f090f1160b07989d503", "enabled": 1 } }, "mrtg": { "100": { "checksum": "sha256:6890958fb0f7c357a4a9600c34e21bf6fc9fd8ef36e9a5ad516b3bf2c1d88bd6", "enabled": 1 } }, "mta": { "100": { "checksum": "sha256:b61027e2a84c3f6fffbc7eb3fd40788bd9dfb036b3e04a8f77d233e10c9f2ec8", "enabled": 1 } }, "mysql": { "100": { "checksum": "sha256:e08540cc55168dd36811b1962936ffacaa21be50b15b9d5d34fa9d55dfd125d8", "enabled": 1 } }, "mythtv": { "100": { "checksum": "sha256:bd730a6479baa42060a62b9c7346dfe21ce28e1a8a432342aa5f302c2cf8ef86", "enabled": 1 } }, "namespace": { "100": { "checksum": "sha256:01131128229571749a7f5df2e65e22e9850789bfe386926cb34e91153ca9e88c", "enabled": 1 } }, "ncftool": { "100": { "checksum": "sha256:edb0f4d496b429a2b09ff9b1d74bd30126b5ee2265a4370f6e992cf9d696de0e", "enabled": 1 } }, "netlabel": { "100": { "checksum": "sha256:b28911955f6731646cd779f6b89c2255238c3e60e1b93d227ce588484694f755", "enabled": 1 } }, "netutils": { "100": { "checksum": "sha256:8bc2fc39e9a6cef06df178607ff3e17604e86d709575d37a60de5c1fd2b9fead", "enabled": 1 } }, "networkmanager": { "100": { "checksum": "sha256:6980bdebf1af99aa6822dc970cd6d5a5b430381aa11e96e40244db39265b5e4f", "enabled": 1 } }, "ninfod": { "100": { "checksum": "sha256:3b235676dff7abd25b2b57fa770833d05561bdd24216f4de1202e9ced52a4f4a", "enabled": 1 } }, "nis": { "100": { "checksum": "sha256:33be40fa2b50df5f7234ead34a6471ff1eea62de62445e509c28e5bc8a730364", "enabled": 1 } }, "nova": { "100": { "checksum": "sha256:0d4fd8a1f74c8e46c18a93794b305dcccf3d50e9db095b659d996712e2905dc0", "enabled": 1 } }, "nscd": { "100": { 
"checksum": "sha256:d4f61bea290cce978cbb1653866414f9f848bc56ee6491cf022e9131dd2ff5fe", "enabled": 1 } }, "ntop": { "100": { "checksum": "sha256:6f174abacc65b0de9248c39a31210eecb6fdbcd15ecff5bc254fb0d366f83806", "enabled": 1 } }, "numad": { "100": { "checksum": "sha256:5053d74b0f4734131234b4faf6cf7815a725bfd5b73b6acf07deb77a3cced1e2", "enabled": 1 } }, "nvme_stas": { "100": { "checksum": "sha256:0538a3f6b5c469223bfb2740d7365838eedf7ef65b89353645e9d3bf6e17253c", "enabled": 1 } }, "nx": { "100": { "checksum": "sha256:f8b11739918f67700fbef58c2ab5c87a61413acf6aa8b650a014285c0c3684e2", "enabled": 1 } }, "obex": { "100": { "checksum": "sha256:a3b7c308fe73bec0edcfceb85e1e1799927a4d7e25ec4314649b447f670a49ef", "enabled": 1 } }, "oddjob": { "100": { "checksum": "sha256:dd752acc5dc10414a4708dc0bc655d7861bfa74bb20863aa10335dacc53357ba", "enabled": 1 } }, "opafm": { "100": { "checksum": "sha256:bd4724acfb4c0ec9283595e24e29f9926c18e7af0169fd5eb344ed00de6bf393", "enabled": 1 } }, "opendnssec": { "100": { "checksum": "sha256:f1e989b744c90ee0be0978d34da65a84fdd81e5b6aef8ba116560bc157d73f0a", "enabled": 1 } }, "openhpid": { "100": { "checksum": "sha256:d2bd05813a6a5257688f9bb486a1bda49fb169eab4f16c3d503e01883c52bd11", "enabled": 1 } }, "openshift": { "100": { "checksum": "sha256:03597af2e3a916f7c4eb83e1b360b24cad9e86ce814494bd68da602991a70e7e", "enabled": 1 } }, "openshift-origin": { "100": { "checksum": "sha256:66173ad07abd0c8bb7e529350399507549601923afeca8e2ff2b0f80cb9992e3", "enabled": 1 } }, "opensm": { "100": { "checksum": "sha256:3399e9663584d6d1032992f903b7aba4f96f4f0b7a5971faf90eb816cc7655b3", "enabled": 1 } }, "openvswitch": { "100": { "checksum": "sha256:c1107cdfed17e78cabd9094b3f6aa1d9537f70bb4ddfc236983cc5fdc167e8ca", "enabled": 1 } }, "openwsman": { "100": { "checksum": "sha256:c73d5f710032819a6456d1020ef5fc8bb683aeb167b6169f56a295c31b14c72d", "enabled": 1 } }, "oracleasm": { "100": { "checksum": "sha256:d733f8dbbcdcfa398f6f139831236fa6cd0abdf132090435bb647081d2f6a785", "enabled": 1 } }, "osad": { "100": { "checksum": "sha256:44657ecdfa5bc1235f85a50222e025ac4721b24a01af6d167525f7cb0a580c31", "enabled": 1 } }, "pads": { "100": { "checksum": "sha256:92ded69a63e7ecda34b1d8ef17ffae8c9e8075046a724f8f8242f4b66d2eff19", "enabled": 1 } }, "passenger": { "100": { "checksum": "sha256:5dc833e3b3dd31a1af446c7883f6a2b92c40b9192d072ef5de2fda7ddf4f84ad", "enabled": 1 } }, "pcm": { "100": { "checksum": "sha256:924bf0bf4f0b2ea9d633ef46f55793acb2eb3da6379bacd355814507e5ddf67a", "enabled": 1 } }, "pcmcia": { "100": { "checksum": "sha256:8d6835bdf52f73dfd1acf73ce13ea8325b0bd3d0107b0ba86953fe2fbee20330", "enabled": 1 } }, "pcscd": { "100": { "checksum": "sha256:016a326cb4a747756723c0e7d675e4992e8abfd1f51a6c06aa93066bf45412ea", "enabled": 1 } }, "pegasus": { "100": { "checksum": "sha256:ee292c9774f2109ffcef5b2a1ac7ae68e44f719ba40d155f84287fe03a6c01af", "enabled": 1 } }, "permissivedomains": { "100": { "checksum": "sha256:2453bad4ace526f3cf2c60b358e95a5476692ef25da107b10f52f3af27c056d2", "enabled": 1 } }, "pesign": { "100": { "checksum": "sha256:5d77621f8da0f789c1b9ea9ac24925e02e0a7fe2a3a26cd7e5f46085277041bc", "enabled": 1 } }, "pkcs": { "100": { "checksum": "sha256:6cfcf3051765f61e954cd243d3b652cee14d378e4925b12569512e5ae815b40e", "enabled": 1 } }, "pki": { "100": { "checksum": "sha256:07669cb2df2c61ec4cb621f3332f77f351facaaf5232a8a72c61a5ee7bb44d71", "enabled": 1 } }, "plymouthd": { "100": { "checksum": "sha256:24e235787e311d82b99df7b41d724da0e18edc3bc6443f9f83f8d6247e33cbac", "enabled": 1 } }, "podsleuth": { 
"100": { "checksum": "sha256:2c0350e46ff4eb97af27f63025763c565d7097457d4cde6f46088afe7f8929e9", "enabled": 1 } }, "policykit": { "100": { "checksum": "sha256:6c7d4f4b8227aa55a5f142bbb8faef130cd10710101eb6f0aacb62547db5f49b", "enabled": 1 } }, "polipo": { "100": { "checksum": "sha256:d59109d36dd2868269eb18631e37feb5981db0aa780c55f7e0fb66d897e4f48c", "enabled": 1 } }, "portmap": { "100": { "checksum": "sha256:93a95273e16837c24572e635d58446ed1162ecbfed59695e866058df4dcbec2c", "enabled": 1 } }, "portreserve": { "100": { "checksum": "sha256:f878b2cf560b4bdff33fedf8c8f2011af390b77ee8f9416fe93ebf46153c97d0", "enabled": 1 } }, "postfix": { "100": { "checksum": "sha256:7c128725a61bd30f3e35f39b9a832e5cd3ef435dde58241616b24e28f67ffbe1", "enabled": 1 } }, "postgresql": { "100": { "checksum": "sha256:60153b9f850c92927ce2a61becd9c248ef56dc0ceb7ba990185b98eaa9b011bd", "enabled": 1 } }, "ppp": { "100": { "checksum": "sha256:ae9f1c81d0877b9f40c9d9bb5b862b7c58c73da9045f850a0a72d1b982fada35", "enabled": 1 } }, "prelink": { "100": { "checksum": "sha256:8d550f8b9e80beafd06bc1392e60ecba8e922f8d0e609fb6674de5cf27c8d772", "enabled": 1 } }, "procmail": { "100": { "checksum": "sha256:ff82ca8bf6365948aeaf3c14fbc7ea9a212074d1462a31aa676b542d0d76c882", "enabled": 1 } }, "psad": { "100": { "checksum": "sha256:664148c3f8d4a649714cdbcf15e4862a5e648e0aea83d4530d23866c78c8d8d0", "enabled": 1 } }, "ptchown": { "100": { "checksum": "sha256:d58fb38422b37d406bf3e79136e3a94a40885c08f9c1591975c9a7495b7f606d", "enabled": 1 } }, "pulseaudio": { "100": { "checksum": "sha256:8194c7df0ea3abd18f07481b0181e01c5fddb21ebb594ed5b20bc1ced555fb27", "enabled": 1 } }, "qatlib": { "100": { "checksum": "sha256:ef1377e6864d9b5049866f6f0c3986e474499f1bb0082e9430f208e2c9d84b54", "enabled": 1 } }, "qgs": { "100": { "checksum": "sha256:add48a13d9b3cc5c82c73c2ca7d72db10b074970c14e26d58b88f670f9221655", "enabled": 1 } }, "qmail": { "100": { "checksum": "sha256:c5e1779123c640fc55da0871bfd96bb124d8c9b50b9065136c025c83364f453e", "enabled": 1 } }, "qpid": { "100": { "checksum": "sha256:71a7ff78c03cde811d19a4c115de8a898007bdf437a9350d4708b3f9142481c6", "enabled": 1 } }, "quantum": { "100": { "checksum": "sha256:e66ffb20855170cda4ec60840ce05e73d69dcc54330c86b24dd89ee96bcd1d73", "enabled": 1 } }, "quota": { "100": { "checksum": "sha256:682232f167f6ecaafcb051df5557addc52b814e923f143bf37a2035fb17315ae", "enabled": 1 } }, "rabbitmq": { "100": { "checksum": "sha256:0fede9cbfe184d19e8ac7bb68a1ce8a110aa45898ca782e3c9daa5649a476fba", "enabled": 1 } }, "radius": { "100": { "checksum": "sha256:01fbaabbb5b83721fe19a813401d94510f6fb260714c3adcc40d54fbb994ef70", "enabled": 1 } }, "radvd": { "100": { "checksum": "sha256:a8e3e2b90df3917dbaf684a1bdf72432d8bf2aa6ec41233e06a2eaf02aa81686", "enabled": 1 } }, "raid": { "100": { "checksum": "sha256:8d5ee75190133ca16f3931a80ba1202b6cc171e6a3b1cba6dc5788a33bc84e0a", "enabled": 1 } }, "rasdaemon": { "100": { "checksum": "sha256:fdf6e82be7b620aaea9c8928edc39344d32dd9b1c4e0f78a6c6fba39bc005b6d", "enabled": 1 } }, "rdisc": { "100": { "checksum": "sha256:4788c42c425e54a8dedb4882a6a2bd2183ad72f980f4217299be830afe275069", "enabled": 1 } }, "readahead": { "100": { "checksum": "sha256:7d65968a2e3d186de718f9f6604f2cce60bd08bab6dbe0e60f60222b228a5744", "enabled": 1 } }, "realmd": { "100": { "checksum": "sha256:78d9abb7263a5c028d7065c0cadcfe14daf3b4aa064e679458f3bf271a69d2e5", "enabled": 1 } }, "redfish-finder": { "100": { "checksum": "sha256:e05fc89dc14e7a723647597786aa62adc255ca1301474ff0c29dff49e4176e4d", "enabled": 1 } }, "redis": { 
"100": { "checksum": "sha256:825a97c385fbcbfff670278b26a17f91bbfa8585f2219efc48781e0e510bf213", "enabled": 1 } }, "remotelogin": { "100": { "checksum": "sha256:695b31e12a82435b57e11459e99444fec8d09aba051b1a12b8efa765608dc719", "enabled": 1 } }, "restraint": { "400": { "checksum": "sha256:892885a058782b7fdfb5d86e5ec3ecca261363a14a2254652c6a7ff8a52807ae", "enabled": 1 } }, "rhcd": { "100": { "checksum": "sha256:39bc17cbd08c0377eb935fd0ca86b6542752c5ce07cb0f9d9e5d8adfe4306a13", "enabled": 1 } }, "rhcs": { "100": { "checksum": "sha256:3da6785a2c37296fb1ba2a1b621ebccc9e0837d9acf69b3442e75f3a60f2a484", "enabled": 1 } }, "rhgb": { "100": { "checksum": "sha256:912bf2ea73ebbfd1d5fefee37b336a9002345d01f8eb54cb164c28160fc4f1c1", "enabled": 1 } }, "rhnsd": { "100": { "checksum": "sha256:66b1ecc6382afc5032df2921281550af0431befd8cd517c4f8c68cab2eac0e11", "enabled": 1 } }, "rhsmcertd": { "100": { "checksum": "sha256:4ed93113b5ea0760e89533919f86cf1dd26b5587a9d7cf8bd951896fc77d7fa9", "enabled": 1 } }, "rhts": { "400": { "checksum": "sha256:008a840aa2183d0fbf1b3f3bb9542a7ba51c03a1e3a415b188ca49d2e4ed7e51", "enabled": 1 } }, "ricci": { "100": { "checksum": "sha256:3ba51ade82ac9113ee060bb118c88deccc4a7732312c57576fd72a70f40154aa", "enabled": 1 } }, "rngd": { "100": { "checksum": "sha256:b4fc4fbb8572088eb785b643f5d103d5791af96d37e6cce850d671d9291bf70f", "enabled": 1 } }, "roundup": { "100": { "checksum": "sha256:6b4e7757f0422a2c54d93e920ff7b2c5bd894d495065b3827a741a768f042b18", "enabled": 1 } }, "rpc": { "100": { "checksum": "sha256:702d5df73a6865bc249ffb537ad7a0d2388e1540716e4b2f7e844485870e37bb", "enabled": 1 } }, "rpcbind": { "100": { "checksum": "sha256:4cfda0dd9868ff0890c7a612f07c282a8cbe4a319c766d7cf842ed639fc2b34c", "enabled": 1 } }, "rpm": { "100": { "checksum": "sha256:64c59a71e1786fba000398e05773c83fbbd9f92c0341e52cbefd1386357b4e16", "enabled": 1 } }, "rrdcached": { "100": { "checksum": "sha256:2f0c18590911b20c58bbc9db0c9c0c471f4d66171f7400079a2e956366580e24", "enabled": 1 } }, "rshim": { "100": { "checksum": "sha256:f19a726a7c78ddd9aafcf8d2c4b6a57bd05fdc8450a91119e1f0d0abc09151dd", "enabled": 1 } }, "rssh": { "100": { "checksum": "sha256:b29d987a469d59767e7120202e2abad06865eaa84d3eb61d2ae6b7a78c1d6dca", "enabled": 1 } }, "rsync": { "100": { "checksum": "sha256:44e8808dad842eb55d51c204374ef445bd8515701db580d2c91f06ca9949f2f6", "enabled": 1 } }, "rtas": { "100": { "checksum": "sha256:4b1585496c5777fe140f76f11a62df0ddad219336fac090139efbc368520d38c", "enabled": 1 } }, "rtkit": { "100": { "checksum": "sha256:2a990092d1cf38541a49375e9e605d82515a34e19b9ab6b70392afb596e0c612", "enabled": 1 } }, "rwho": { "100": { "checksum": "sha256:80bda9a30a4b5ab4b6b14d7f6c92efbfd5a63658a4b44565a02c2c552cf4a28c", "enabled": 1 } }, "samba": { "100": { "checksum": "sha256:405780af5278be0dd7f89425f91ca1c48527743d2b6876bdbdcc7545d487dc09", "enabled": 1 } }, "sambagui": { "100": { "checksum": "sha256:f76f5b094e42967dc240e161cb187bc528f2f2a3ee2ab93c53c0b15d820c0921", "enabled": 1 } }, "sandboxX": { "100": { "checksum": "sha256:99c31c501752dfcb8460f44b4e363b9d57b85c3ad422a951f13f2d42e5f9f54b", "enabled": 1 } }, "sanlock": { "100": { "checksum": "sha256:8361387196f6c48bbed95c77561bdd324ab96356d6dd0f4874832accc67738a4", "enabled": 1 } }, "sap": { "100": { "checksum": "sha256:89169ffed763d6257769d5ed83185a9eb376145baa60dbf01b4088f37aa663bb", "enabled": 1 } }, "sasl": { "100": { "checksum": "sha256:7727a62bcf612392c76d46f3cc8c22f33c3c87c30a320805ac9844ce68409ecf", "enabled": 1 } }, "sbd": { "100": { "checksum": 
"sha256:1ad633f30ae0f80052b31090652780dab90b10696c098ac81ea831035a652835", "enabled": 1 } }, "sblim": { "100": { "checksum": "sha256:c9cbfb3894148ab693f0c850232f3a1b1aefe5c5cf5f4a06bc74d44cdd2b52f5", "enabled": 1 } }, "screen": { "100": { "checksum": "sha256:67b8654cf2404ad763f5343ad3ded35f198c26e99b8a9a150143911acc89ac6c", "enabled": 1 } }, "secadm": { "100": { "checksum": "sha256:6ce5485715b3caab30a72313601de971e7118bc2997a2edf6ce7b229e51c2483", "enabled": 1 } }, "sectoolm": { "100": { "checksum": "sha256:9ff7693f6fb994a0a53dc46230b7ce6c4fe6dccc2b2ec2c8ba49f7c1e3f24eea", "enabled": 1 } }, "selinuxutil": { "100": { "checksum": "sha256:c888a4b5fc698c1bf7551bfbc6d6ea7673a5f7f41d2467af7e15ce634c71e2be", "enabled": 1 } }, "sendmail": { "100": { "checksum": "sha256:1ed05c5ce069437c9de8a57326a0329d883ec753f3a11fe4f70a43ad212ec482", "enabled": 1 } }, "sensord": { "100": { "checksum": "sha256:191a531a60c27b33fadbdb48213980f03b68efec3287545eff3592fcdf4bf686", "enabled": 1 } }, "setrans": { "100": { "checksum": "sha256:e6f726edf701657c80853712b94a4bf5dd0430254d93db45804e60a243c51818", "enabled": 1 } }, "setroubleshoot": { "100": { "checksum": "sha256:8a6ef7c3d8ee76e112224e0c4e0b91572db8c85f547bbed6d7ce3f6f6d4383de", "enabled": 1 } }, "seunshare": { "100": { "checksum": "sha256:cc162915cf1fc3cc66616c3224e9e848485198a28868c237adc9d7077791cba8", "enabled": 1 } }, "shorewall": { "100": { "checksum": "sha256:74b5c41b13bd849ce82040012f557fec4b9cfad3a9072f9f17f78400868da558", "enabled": 1 } }, "slocate": { "100": { "checksum": "sha256:91acb71305dfde220ce7574e2ac67af16e6f8630639dc66d494cbf8120d2d07a", "enabled": 1 } }, "slpd": { "100": { "checksum": "sha256:9b8a5c1ff4c21846701eb5e0603cc022f4530c568db6d9fab392e41c0ed64720", "enabled": 1 } }, "slrnpull": { "100": { "checksum": "sha256:bcf004c239b72d23fb4f1e5842272bc20f287cd312ed394464db8cb9218f4377", "enabled": 1 } }, "smartmon": { "100": { "checksum": "sha256:fc3eaf23ee99b98d2ff17a5df04776e8553f490d7f57d49a24061cd49bfaa997", "enabled": 1 } }, "smoltclient": { "100": { "checksum": "sha256:17d8fa5ce4b9402dfb10ad431241cb2a5a1b2f726caa03ae7f1d7d410c2ab6ae", "enabled": 1 } }, "snapper": { "100": { "checksum": "sha256:6506687dbaf850c784d6f2af14197d3c1768514fad98e08fea69e92a780ff65f", "enabled": 1 } }, "snmp": { "100": { "checksum": "sha256:59b6f3643d2f404ef03d749628b6872fd650b5b10851862b4accad8276bc6f29", "enabled": 1 } }, "snort": { "100": { "checksum": "sha256:34b45f69552f2b284b1f6e0876e4a96d1c05c28e4ab42d2bc2a241c03fa73309", "enabled": 1 } }, "sosreport": { "100": { "checksum": "sha256:35ef9c580c4071208af6169ae1059bfee51938d36dbec2bc2354d51ed5dc505d", "enabled": 1 } }, "soundserver": { "100": { "checksum": "sha256:5594f07c04c9057b74df1612012c2515265ee04d58b11bfa46a73531b703c1f7", "enabled": 1 } }, "spamassassin": { "100": { "checksum": "sha256:b00a50f92d0e8ef2789d03756c7bee69f983edfc4a3f409304835ad25133e3a4", "enabled": 1 } }, "speech-dispatcher": { "100": { "checksum": "sha256:874410d4edbbd1f73ef0e69ea40e93054a5d65cfe1556b00f6b474b928400a39", "enabled": 1 } }, "squid": { "100": { "checksum": "sha256:400e9b1c9ace97d2e43b5916b453d189a5c6f60133876f15672a48607edfd0ba", "enabled": 1 } }, "ssh": { "100": { "checksum": "sha256:66beadff1a4ed7e48b3f3cee1444f5f1aaa833d212cdc76068f2f306b8455970", "enabled": 1 } }, "sslh": { "100": { "checksum": "sha256:fd8c0b8cc073d8025ab8754b7885e0375b4e700dd3fcc921c45666829b652de5", "enabled": 1 } }, "sssd": { "100": { "checksum": "sha256:1b2a0e330daa04838742fdcd50a9b539072c58d48e949e4a3ce7933da47cbe3c", "enabled": 1 } }, "staff": { 
"100": { "checksum": "sha256:2ab07a8deeb7ef4cf09f94bd2ba250166a4d016bd9c581ddd470ab2784baf5e3", "enabled": 1 } }, "stalld": { "100": { "checksum": "sha256:e7caeb60df6f2002f7be4adc7a1506b6fb585e6bb9f4585381c115a90bff4a15", "enabled": 1 } }, "stapserver": { "100": { "checksum": "sha256:836d01ecc314a2b2b4eaaea69ce1e4a03f3274bd8bd25e2b64d0329e6f9d8f32", "enabled": 1 } }, "stratisd": { "100": { "checksum": "sha256:e2c86cd06c00d3ed79b9f7a602b18593d5929156df58e761a04a3cc3ba8be891", "enabled": 1 } }, "stunnel": { "100": { "checksum": "sha256:67fec37a17724a9b059f936b70c199d96906b9bbf703dd8a1670852dbfc7715f", "enabled": 1 } }, "su": { "100": { "checksum": "sha256:dd116a718e125ba88d28936b746a2292088080254134d2001084e2d252ce9379", "enabled": 1 } }, "sudo": { "100": { "checksum": "sha256:df73dbc3f1e232bb5f4d3ba0bd1850eae3c3bc401508b1819c0989b8f67f8033", "enabled": 1 } }, "svnserve": { "100": { "checksum": "sha256:2eb63b8ac8f3038eb1ff3bc18fc5923dee4ac3f609d8a14791300ae835249a9a", "enabled": 1 } }, "swift": { "100": { "checksum": "sha256:d342a188298c1fcd4df99c4235985c50ba2f02a4e53d01cef3de48bc31464ceb", "enabled": 1 } }, "switcheroo": { "100": { "checksum": "sha256:f8f67d2c990489a09a436dbd72704b13d6617fdbbb8c5c2c040a85b584de6a7b", "enabled": 1 } }, "sysadm": { "100": { "checksum": "sha256:a8f135ef10becc2a2ffd4e7faf89932ed4aff16331eb62d59e52ff2a5c0966e7", "enabled": 1 } }, "sysadm_secadm": { "100": { "checksum": "sha256:fc1ca3d8b12406dfef9f012c9275817169fbfafc411969e60d357be3b35835a8", "enabled": 1 } }, "sysnetwork": { "100": { "checksum": "sha256:ab2acab6cbf273ed7e78e577b0e2a85225adba387b1a8908b180b07adb950e6f", "enabled": 1 } }, "sysstat": { "100": { "checksum": "sha256:815d229f0b5a8f8a44cd511b5927febb002596a8aad1b85406d674e59378a0e5", "enabled": 1 } }, "systemd": { "100": { "checksum": "sha256:2a643246c63d64d4c57f3877ff3daca2637b195330920c2efd840ebade3fc20b", "enabled": 1 } }, "tangd": { "100": { "checksum": "sha256:f3896d2de3794d7dd54fea03cbebcdf4e6b63bcc512d2fc14433b3be400f4188", "enabled": 1 } }, "targetd": { "100": { "checksum": "sha256:bbfd79953db88f6db10739803d29b003d83311a21c75604d64ed9fae26da541a", "enabled": 1 } }, "telepathy": { "100": { "checksum": "sha256:71c6423e6318342438fea1ba8a38751b5741b4482ca8ed075dbdd36bc6fda9aa", "enabled": 1 } }, "telnet": { "100": { "checksum": "sha256:f482585c8f26517c6ed8e9203bec4adadec8ebc65840089d7483e31ee24fa679", "enabled": 1 } }, "tftp": { "100": { "checksum": "sha256:a5312c216b56620ca8e69679e99275e793b3de9b6e524db1a5678d22b9909056", "enabled": 1 } }, "tgtd": { "100": { "checksum": "sha256:3a4e10afbea76bb0a825f3e10b6be09c1e380f19737aef7a6171a9744c15b33f", "enabled": 1 } }, "thin": { "100": { "checksum": "sha256:58aac19837bee6fd1c5e3d1e2a9c9900c56b9aff34b643fa9d958399152afbce", "enabled": 1 } }, "thumb": { "100": { "checksum": "sha256:46f7b10654f710546a61324618f68b753849ea0b6a7e11f431922a5c848fae89", "enabled": 1 } }, "tmpreaper": { "100": { "checksum": "sha256:f3d5b0012a6f6d0255e831f608cf0d77f1af38a975b222a7f71cf0821f359246", "enabled": 1 } }, "tomcat": { "100": { "checksum": "sha256:2d749a0f3d39317412feb3388eec0eacb60859891ea7da50373271f03ab66c5a", "enabled": 1 } }, "tuned": { "100": { "checksum": "sha256:5b1a3e31fee719423530b8c7c07b6649ab539d38f2b446a3e6d3f029a65696ae", "enabled": 1 } }, "tvtime": { "100": { "checksum": "sha256:561814e9fa4d9ffa1be3bcc8e27ee1a50260293a17de3db6eb9d4a83e14e8faf", "enabled": 1 } }, "udev": { "100": { "checksum": "sha256:48fac9542e02d0c8f461e03905339795331b4fcb2082e830e83189e50af59040", "enabled": 1 } }, "ulogd": { "100": { 
"checksum": "sha256:80d84cb83923e4d5d6b9870b4311a67c87609f010c5ffcdcb00ef6e926a8d785", "enabled": 1 } }, "uml": { "100": { "checksum": "sha256:33a8bba7a36dc094b6220c0dfe282a9e57ff280511965c99d654f4e584f960f0", "enabled": 1 } }, "unconfined": { "100": { "checksum": "sha256:38e42ce3f0baba47216f3b50d7bec9ac531a11d659c8807d0bb43b5e5b4ce873", "enabled": 1 } }, "unconfineduser": { "100": { "checksum": "sha256:e9267049c61e87edd481214c8cedfc02cb396789c52a150b58d8fbf0401bd455", "enabled": 1 } }, "unlabelednet": { "100": { "checksum": "sha256:2f55ef3a5145328ed09f316753cec5b85f67c1b43902be5152fc57c4b95c3026", "enabled": 1 } }, "unprivuser": { "100": { "checksum": "sha256:51ec0952bf860ec23e3bfdfd53f3bfad841a4e5b560cc25a9548c9b207504194", "enabled": 1 } }, "updfstab": { "100": { "checksum": "sha256:ef06a218a285a5a01a1e354d6a40f826815203dc323d00ad68e29f85162c24e7", "enabled": 1 } }, "usbmodules": { "100": { "checksum": "sha256:f71781a997aa0d0df5c9baa600b6212105c75cc290bf634a198ed0d5b42a668d", "enabled": 1 } }, "usbmuxd": { "100": { "checksum": "sha256:f58eadcb76889082e3a109afa993bc7eeed39675991d171a13744bc8b61c279a", "enabled": 1 } }, "userdomain": { "100": { "checksum": "sha256:4b8e317234ae08c1f4a80133c8abba35d412f5797db3c4515d0cf051c35af6bd", "enabled": 1 } }, "userhelper": { "100": { "checksum": "sha256:3c2a65084450b2459115a69bb1d382e452a1da63080ac7fdc85bcac36affe1c7", "enabled": 1 } }, "usermanage": { "100": { "checksum": "sha256:ca220cb87bf9790b38738b6f08cc800a2fd0e083960aa4770c9385b897cd31cd", "enabled": 1 } }, "usernetctl": { "100": { "checksum": "sha256:cfcecf645d2d8a59f98135435d535133a39f70f46d9b47a65b15e88a3805861a", "enabled": 1 } }, "uucp": { "100": { "checksum": "sha256:91a33317bdd39510dd305d768e2791d08b207d8384bfca22322ec49f5b26f9bd", "enabled": 1 } }, "uuidd": { "100": { "checksum": "sha256:c500e8df08994b81cc1d743db684060d03bfe4465fc12eea9a4af83a69af307b", "enabled": 1 } }, "varnishd": { "100": { "checksum": "sha256:db1d0917d263b447f9a744edfd4ebfeca697182c853295c7eaf49f1270218858", "enabled": 1 } }, "vdagent": { "100": { "checksum": "sha256:84679e67832759be8220885abe3fa0157305fc8f50efa604b1343e99907925dc", "enabled": 1 } }, "vhostmd": { "100": { "checksum": "sha256:5ca3d53e3b62d5973442d210faf9b9f5f9b5f4935a74074ce4b18836c8d78b19", "enabled": 1 } }, "virt": { "100": { "checksum": "sha256:d8fadd99af0d343c815f006330529911a5106641ed9c7d22a2eb72e0d9d55d2d", "enabled": 1 } }, "virt_supplementary": { "100": { "checksum": "sha256:664ab4aa1e1eca422d2c627a22a9631ac348221893713bd9a4d97a628094b1b0", "enabled": 1 } }, "vlock": { "100": { "checksum": "sha256:e68a71817476b5ebb8ae2e13e9ea9418a31dd64ffe4e156258cb77029635cefa", "enabled": 1 } }, "vmtools": { "100": { "checksum": "sha256:f45c6d89a3305814e44a05c0d8c8f8a4ce8a923d721e83c9579f76d8d8cd909d", "enabled": 1 } }, "vmware": { "100": { "checksum": "sha256:8d828eef8065f2486b815aea04ed491419e3bf17508cf0ce595fca71f872ba38", "enabled": 1 } }, "w3c": { "100": { "checksum": "sha256:76a11dd14f578f940e874ab4d68ca1370ddfcb2585b6a3a955569fadb77d269f", "enabled": 1 } }, "watchdog": { "100": { "checksum": "sha256:17759c6e3a6229e4a40be0b8121751d768f00fd6ea0a872f4fe65bebe2280b30", "enabled": 1 } }, "wdmd": { "100": { "checksum": "sha256:c9c26249a11c4bace4efa998ae826c3cd5178a19d323886a62b7e355ca3d8260", "enabled": 1 } }, "webadm": { "100": { "checksum": "sha256:ea826918681193d37db69c814ee4c753fef3fcca809cd0fad6f924f829eeb9eb", "enabled": 1 } }, "webalizer": { "100": { "checksum": "sha256:a9e221f7f656f9f0b4937c2bd0f7b93124c7f48f4c88fe8ba608db1eaa5f05d1", "enabled": 1 
} }, "wine": { "100": { "checksum": "sha256:034bceb856cf79ac9329a4affb6cc53cf29c5bebb089c0ddd486a76148812b89", "enabled": 1 } }, "wireguard": { "100": { "checksum": "sha256:ea40fa389e6fc510f40994b9b4272a6b985c80064b8a4d702d5813d5252487f5", "enabled": 1 } }, "wireshark": { "100": { "checksum": "sha256:308910f855a076bdf38241880815f6640dfba4b21ef1be58112deec3ed858d16", "enabled": 1 } }, "xen": { "100": { "checksum": "sha256:dd07546e8a114e1b7f5056d4c5b0f1256050fe93e867fbbb6c5f52d2c6f77ec6", "enabled": 1 } }, "xguest": { "100": { "checksum": "sha256:870a818c9c3a4e4d24386bfc3fc7565af1c8aeec605b3d4cd819169172bb3e03", "enabled": 1 } }, "xserver": { "100": { "checksum": "sha256:476c08aa43723ad6bb98a7254bc6cdad6ddab4aa63336719c192bbf6f5ba6700", "enabled": 1 } }, "zarafa": { "100": { "checksum": "sha256:e27315e58a548c06561117f2dcf86c67e6937dc1ef2071ee612975457091e40c", "enabled": 1 } }, "zoneminder": { "100": { "checksum": "sha256:a077f44cc6d16684de9a93061ee0f7b212e3f729fdbdf594dee573fe5c30817d", "enabled": 1 } }, "zosremote": { "100": { "checksum": "sha256:8228eda847eeaa7529b089edb8c64763d03100e84117526a67fbb41ea006a2b0", "enabled": 1 } } }, "selinux_priorities": true }, "changed": false } TASK [fedora.linux_system_roles.selinux : Set SELinux modules facts] *********** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:164 Saturday 28 March 2026 19:32:09 -0400 (0:00:02.816) 0:09:45.004 ******** ok: [managed-node12] => { "ansible_facts": { "selinux_checksums": true, "selinux_installed_modules": { "abrt": { "100": { "checksum": "sha256:7bd953bc370c70fe9299b766f8a40a1659e03f7ef4dd6c722c3e182bc90c1c68", "enabled": 1 } }, "accountsd": { "100": { "checksum": "sha256:e8caedff457d24c0562673868860f813a6cf223422bc48524e7cf1e8df7ddeb6", "enabled": 1 } }, "acct": { "100": { "checksum": "sha256:1150e95aa33304027895200fbac6de5d0ec1ada237d1cf255f979bcf712831ba", "enabled": 1 } }, "afs": { "100": { "checksum": "sha256:634c80be00ac898add54ea6d59ead5a6e92e4d06a230b9b4485059070b0a3bde", "enabled": 1 } }, "afterburn": { "100": { "checksum": "sha256:90f08987cd8645d1bc99245841a9f2d0c9858196064df233655623d1b5cfbdde", "enabled": 1 } }, "aide": { "100": { "checksum": "sha256:c59e1e8e511ef99a0e5715ed9dd2c15ea0b522186e683ed8bf715029c4ef325c", "enabled": 1 } }, "alsa": { "100": { "checksum": "sha256:ee1199b88bcd39ff6de202bdef25f1dc7292828d80856fa535fb80454dad000e", "enabled": 1 } }, "amanda": { "100": { "checksum": "sha256:3b9f22d94579c8dd60f827159f6f15a2085d9bb799cbc88d7c1d23ce7a63aab4", "enabled": 1 } }, "anaconda": { "100": { "checksum": "sha256:449d303fa3e44bb7afa7b0a715e9566e1e33fd3368aee1b078529f0225cf56ff", "enabled": 1 } }, "apache": { "100": { "checksum": "sha256:bfefb6205876b2f58e84c1952c749c146f4e2b8107a660e084614b23d60300c8", "enabled": 1 } }, "apm": { "100": { "checksum": "sha256:3a903d39c2d9de406f33790f234fde1f1d0b20bacae36fa0c6bfb5fee9f800c5", "enabled": 1 } }, "application": { "100": { "checksum": "sha256:35030bf2d1dc7ec055a954de113ff7918709262d5c318040b0cbd07018e9ee88", "enabled": 1 } }, "auditadm": { "100": { "checksum": "sha256:5da016180d7da3fa18541f72cc69eb5c9ffebc2851ec3e6150bfd5a73153f860", "enabled": 1 } }, "authlogin": { "100": { "checksum": "sha256:6432b280ab64da2e35f7df339167f29bc9b9dca4c01e8e8a0c409b7a0adbd5d1", "enabled": 1 } }, "automount": { "100": { "checksum": "sha256:856e89b68ecf997f8a33e98c7e4bd2250a43f88790efba170f787434139a8c0b", "enabled": 1 } }, "avahi": { "100": { "checksum": 
"sha256:78ff1f7154a00c128cbf5c237452baf7ed1cd46cb11378439b64432d1db58d4f", "enabled": 1 } }, "bacula": { "100": { "checksum": "sha256:1e517a22f8a71ea3ef177798685dfb6359b1006205fdc97a0972ff1cf7125f40", "enabled": 1 } }, "base": { "100": { "checksum": "sha256:454cc3d74ae64acf78ad17344d47579841f6b44266c6c3d56f58594918d2e3cc", "enabled": 1 } }, "bind": { "100": { "checksum": "sha256:4d13ddead5cb94be9c944061044e0bd56974a9db9df64f7259593b57d51386d5", "enabled": 1 } }, "blkmapd": { "100": { "checksum": "sha256:00bebe07cf015d4084870d1f0866913ae687801ca2d26e12c00df6823b3bc304", "enabled": 1 } }, "blueman": { "100": { "checksum": "sha256:0cb5bf9ff94cee18667b41dc4d1b988ace9baa06ca99507a91ff3190f4e39d35", "enabled": 1 } }, "bluetooth": { "100": { "checksum": "sha256:233825c029885cb6196920f19b27336b444411b9a15b956c95a2a07b89e9b041", "enabled": 1 } }, "boltd": { "100": { "checksum": "sha256:afaeabb15d1d5e4f3d07865c5213f4a78ae5865d0f782e95d1c599e61b7ed7d3", "enabled": 1 } }, "boothd": { "100": { "checksum": "sha256:2c8ef6be5667ad71b144c8bd4ec606b56cecd4e3ea1d242cbc657c1c993d99af", "enabled": 1 } }, "bootloader": { "100": { "checksum": "sha256:dd35cbec0b5e8f81e3394a60905606fb9d986fd394ad60ccedfcdb60f0137b0b", "enabled": 1 } }, "bootupd": { "100": { "checksum": "sha256:e89032180210c66a288c43d2de3a47b285d38fa239226bd49ae19a1a0488f41c", "enabled": 1 } }, "brltty": { "100": { "checksum": "sha256:96474cc59c799aa0e25123ea9909b4fb319a03f1b5f6cbbf1ae3dcda374815a7", "enabled": 1 } }, "bugzilla": { "100": { "checksum": "sha256:7c8fa6c136fc6624a1dd4345c3484ffbc07c9a4be8b7543d78f0615680cb73cc", "enabled": 1 } }, "cachefilesd": { "100": { "checksum": "sha256:1b066f5d029b5584d34d95007991d218446244f994f3ff802339cd5890e48091", "enabled": 1 } }, "calamaris": { "100": { "checksum": "sha256:60ca58fba194f53faf1c0bc41f8eeeba9ca3de6f2da08f8940b6d1d3093e7c0f", "enabled": 1 } }, "callweaver": { "100": { "checksum": "sha256:815d2bba5c316d5d0334add30dca473daf3fdc85e48785c26c7b47b2ef833823", "enabled": 1 } }, "canna": { "100": { "checksum": "sha256:4ec687f59310bcb03685bec14fec451d393508d1ca5f926209ba967d42673d90", "enabled": 1 } }, "ccs": { "100": { "checksum": "sha256:b6821587c3b2df8dc3ce8de9851cb1be120dfd68e5729141e7a293917029e978", "enabled": 1 } }, "cdrecord": { "100": { "checksum": "sha256:df9850293d6833d206bfb3a875bdf69d0823daf24993b30f962da683032555e2", "enabled": 1 } }, "certmaster": { "100": { "checksum": "sha256:de4651616a6c8dea0dd4b018d3ab32c1506ba75188d1bcab2e04af461eea6040", "enabled": 1 } }, "certmonger": { "100": { "checksum": "sha256:91ab7c5c9df2a80b515c52b105f54e9247b092be7864be939d880b2f94cec862", "enabled": 1 } }, "certwatch": { "100": { "checksum": "sha256:bec8a93b694c60226db8744867c6f87775440937699ac0d023e06e7b7aee1d6b", "enabled": 1 } }, "cfengine": { "100": { "checksum": "sha256:3f5f3b049123ab0a61d1f7a7e6372bd7d2194feb212f2b5bd85a9148f21f7db6", "enabled": 1 } }, "cgroup": { "100": { "checksum": "sha256:0ae822bb67f347f0a88f4ec8584f394e3e10fc11363dcf34b1d583305e76c9e6", "enabled": 1 } }, "chrome": { "100": { "checksum": "sha256:d20dacb3b990c66c37bbf1bbd081a84a0e35f3cdf1501c27a5ec881c3d187d84", "enabled": 1 } }, "chronyd": { "100": { "checksum": "sha256:090e59b1324bf559d79a1ef363fe9bc1bd2adb928f6a95bb1628c92f93063415", "enabled": 1 } }, "cifsutils": { "100": { "checksum": "sha256:80b987a686635b3e05bedf481ef892af7231100a61fbf6ca5e93da17dbb887c3", "enabled": 1 } }, "cinder": { "100": { "checksum": "sha256:9fa130934871404f743c4803af509afa78e56b3ba2f83bd108564858f163329f", "enabled": 1 } }, "cipe": { "100": { 
"checksum": "sha256:a68798c10fa97ddee5f54ac1d1281ecce65750e4e151076f4ad826187fc647a2", "enabled": 1 } }, "clock": { "100": { "checksum": "sha256:4e04381e36d9df4d9f19ad718b1ddf4686f633f72b24d1161055b1f7280a81d4", "enabled": 1 } }, "clogd": { "100": { "checksum": "sha256:33c562fd35e8b9fc5fdf807c488d1ac4adfa6c3b92dbbf87034a6732478e1bf7", "enabled": 1 } }, "cloudform": { "100": { "checksum": "sha256:8279ce237a5b4ffe5a80db09e71f06bdc8a4838910274ffc4e240ec99c185df5", "enabled": 1 } }, "cmirrord": { "100": { "checksum": "sha256:f89476b4ce6acf51cb0628609027a6c44a90db4ccde4da07505b5332a00b7c63", "enabled": 1 } }, "colord": { "100": { "checksum": "sha256:8a072efaf9d6f3af5ec04477f28ec73585274598b69d2e8f24c8180dcfacb15c", "enabled": 1 } }, "comsat": { "100": { "checksum": "sha256:d5d67d239ca7cd2acbd4c5e15fbbc0f97810139fd352e9966c1e63a7d6ab5188", "enabled": 1 } }, "condor": { "100": { "checksum": "sha256:a4da29d700315627bf480c63220b2b639ec0b87435f9ecca111eed86c1e019cd", "enabled": 1 } }, "conntrackd": { "100": { "checksum": "sha256:36bd297ee2c16ed1564895422c05f51d957f09ef17120ac2efc93dc46d2d81a0", "enabled": 1 } }, "consolekit": { "100": { "checksum": "sha256:053f0dac3d8bc41d9dcbaf9b3f1c2e55ec313e07465db7462fdacf8fd89ce553", "enabled": 1 } }, "coreos_installer": { "100": { "checksum": "sha256:9fb7d00873d78a196b1fb639f107a92cf007803c7eaa2658eba7ed05081acb99", "enabled": 1 } }, "couchdb": { "100": { "checksum": "sha256:59f3c694a3ba5e60ece2b1ddeb5f5bd4f00fdaa67a5c7aa3a8fe7bd302963523", "enabled": 1 } }, "courier": { "100": { "checksum": "sha256:c05ca77b6a73640331abcf4018a9b7f2f3733f9e128bd96d7131ab7ba1fa823c", "enabled": 1 } }, "cpucontrol": { "100": { "checksum": "sha256:0e54e45a5adaa7cc24e6a273e25693919e92f498e42b8e136b7d7bf29be2d6af", "enabled": 1 } }, "cpuplug": { "100": { "checksum": "sha256:629423401aaf5d0f529905a421a461d2f1d7ddbdb94020a140831f8873724c39", "enabled": 1 } }, "cron": { "100": { "checksum": "sha256:7ec2279bb83c931e6f379f45255a0727d207838ab55930f7595e0ab1e95b8db3", "enabled": 1 } }, "ctdb": { "100": { "checksum": "sha256:601b41f04bdd9789e01a1158241a17c7c4f937c88adbc75e9bf8875ee7cb0756", "enabled": 1 } }, "cups": { "100": { "checksum": "sha256:9f9cfd140d7b13b9679ba8b8d7a59366294db02d816d60af2e00a3fff1f6fed9", "enabled": 1 } }, "cyphesis": { "100": { "checksum": "sha256:5d64fbf2f59d2c8ce842a9e8adf39877e41bb1d3e77c374681044aafbd662d7d", "enabled": 1 } }, "cyrus": { "100": { "checksum": "sha256:1ce15bea5149f786d9b714426a2870c43d01107f2e3a6bd4b5b324a166508dbf", "enabled": 1 } }, "daemontools": { "100": { "checksum": "sha256:cd287fe5971d71a4512ad52ad855f427c8b722cf7aec6e884ca646ca3da0df2b", "enabled": 1 } }, "dbadm": { "100": { "checksum": "sha256:f6643411d4b5fbc33bd87d4b3b1d4ea1b5d3659a2092cdee9ecbd4dd700af416", "enabled": 1 } }, "dbskk": { "100": { "checksum": "sha256:41bc4ffe76c9e5c220822efd68a2e55b1126b38f646b7c4016a36263a89e482d", "enabled": 1 } }, "dbus": { "100": { "checksum": "sha256:fb9a0c7ec7a8627b89649e44dd9e2d6e4cf70166b2a55f6509f898695510376b", "enabled": 1 } }, "dcc": { "100": { "checksum": "sha256:8b52f0bebd92342ee6b7e00dfe3e20d3a0f041badd4312b9b22b3d3ab0d1b3b1", "enabled": 1 } }, "denyhosts": { "100": { "checksum": "sha256:22ed092464b3757fcc58749af15cc33319f406db1747f4b28f74feb123969612", "enabled": 1 } }, "devicekit": { "100": { "checksum": "sha256:7633e1cf2075f6323862d89b5e0072681e64e41895b6caabbc8c6b18223dce9c", "enabled": 1 } }, "dhcp": { "100": { "checksum": "sha256:1da30094d8664d16dee43b934829c800003e49304f1540e5b41f9fb12a2df4df", "enabled": 1 } }, "dictd": { "100": { 
"checksum": "sha256:6cdf81585aeb903ef5da64551f6bde953aeb48f8623a8d416485847541b7b283", "enabled": 1 } }, "dirsrv": { "100": { "checksum": "sha256:1af7de0f7c691873148f17453849b3dee97e78a1e8108755c1c133c05f29b651", "enabled": 1 } }, "distcc": { "100": { "checksum": "sha256:bd9199873915ce6fadfc570fba837765971726dac64a74e1ba74c55dc0b24067", "enabled": 1 } }, "dmesg": { "100": { "checksum": "sha256:1205bd72660c46019cfb8c3a899accaefb280f5f6bda63850ee2b508cc4542d6", "enabled": 1 } }, "dmidecode": { "100": { "checksum": "sha256:b799553c2c0ab0abd040196142394a15d429e15b573df56edd0e150295d6993c", "enabled": 1 } }, "dnsmasq": { "100": { "checksum": "sha256:bdaf9c5be3de423b3d1b72c8bf38e2315fd58ce10ca6a58873c7d3e3a9c8aed2", "enabled": 1 } }, "dovecot": { "100": { "checksum": "sha256:1de79cf621df4cb04b8ee1201f38c91d8a23cfd85928894d4f9a8d3a27dd99e1", "enabled": 1 } }, "dspam": { "100": { "checksum": "sha256:5d8847ac4f68cf59bdc174bc1ce3688f86efbdd4a4563f701cdc74b2fa01504c", "enabled": 1 } }, "fcoe": { "100": { "checksum": "sha256:58fbe8fa7832fec940b7afc7ffe8e4357ddb5a03a662687b928f84029d81c781", "enabled": 1 } }, "fdo": { "100": { "checksum": "sha256:c821191e37683fab6a25fa714edaa75bcd7a81760fa8b547c31e40967875a29c", "enabled": 1 } }, "fedoratp": { "100": { "checksum": "sha256:09288902a734ceef738fc904463b50798ce700c15059c70d092412b12ead156d", "enabled": 1 } }, "fetchmail": { "100": { "checksum": "sha256:9fbdec8e421e1fa27dfea13b163cd0810d404845ee724b6f1b3ca5e6500a42c0", "enabled": 1 } }, "finger": { "100": { "checksum": "sha256:9144a6012aa7771292a276576f811b7948abf4b7fe2e07f05c66d232d5811055", "enabled": 1 } }, "firewalld": { "100": { "checksum": "sha256:ae1f3ce0ff3a003f1db93dbbe09084b0ba32675b332f9930f23f9f5e66f57204", "enabled": 1 } }, "firewallgui": { "100": { "checksum": "sha256:60856e056bdd9de8ffce0f5468846b00616fad40f87d38d5fa73acb74475d83b", "enabled": 1 } }, "firstboot": { "100": { "checksum": "sha256:8d10737fea4fe0dd3ae3725002a8f0c5889a3645ba4894e9dccec01a3e51b3d9", "enabled": 1 } }, "fprintd": { "100": { "checksum": "sha256:260a661a05f5958d32eecc692d9d5350d51ec0ef9e9bf29aad653d8637ceba29", "enabled": 1 } }, "freeipmi": { "100": { "checksum": "sha256:e206bfbfcbe748672784fe52a91a1220965bcae5ff57dab458ade953f0b17b80", "enabled": 1 } }, "freqset": { "100": { "checksum": "sha256:8826b12f85b02168080b03dec5eef5c91283ba1ebf8370022a71170064a97dcc", "enabled": 1 } }, "fstools": { "100": { "checksum": "sha256:00b8b8e23b9e36087646cffa7c5126b0a402ac38a958930d27fd058f78f67987", "enabled": 1 } }, "ftp": { "100": { "checksum": "sha256:181e899c092e42a648f7474f936d3413769842e4a0192dbc91cf587cd1547ffc", "enabled": 1 } }, "fwupd": { "100": { "checksum": "sha256:54578edd17537e1639df33aa54a731059844519c32cb8dee24e31b29f499dc67", "enabled": 1 } }, "games": { "100": { "checksum": "sha256:325a80a2f12fed84077e57ac8725cdbd3449114115ac74904280c05c4d9f1597", "enabled": 1 } }, "geoclue": { "100": { "checksum": "sha256:9ac486b2d71758e95a106894de9c4f5b21506e07caba5d3753964556cb042fab", "enabled": 1 } }, "getty": { "100": { "checksum": "sha256:0a0e0d24bb9866726e90384d92166829d3c43e6086613b425735544745295adf", "enabled": 1 } }, "git": { "100": { "checksum": "sha256:cc208709ab1c0862004f9576e53a62665826c6cdb5f443eb463d8743cc399769", "enabled": 1 } }, "gitosis": { "100": { "checksum": "sha256:9505b4010a4aafa33b27c1a73f02f7fb2ff720e95ef943b40db387b893b7499a", "enabled": 1 } }, "glance": { "100": { "checksum": "sha256:a1966f6618bc0d636a87d83d852abba0b92bcb8aaafe82837b39958954490ad5", "enabled": 1 } }, "glusterd": { "100": { "checksum": 
"sha256:80108836908472e7859b47ff8ba90d2c629f02666a3246c2dc7e6039ee1dc099", "enabled": 1 } }, "gnome": { "100": { "checksum": "sha256:42e7cda751258014b8bf2492522d20dcc0a1c96027d8261b7996289ad136ee7d", "enabled": 1 } }, "gnome_remote_desktop": { "100": { "checksum": "sha256:840c649229032dfd9b5880f50fcd371e5cc4c87fba7d424f03f3f5f28cb1f686", "enabled": 1 } }, "gpg": { "100": { "checksum": "sha256:ce63d6d0ffc035614b61d82eae48a44485151cb6e93a0617c782116187ab1ad3", "enabled": 1 } }, "gpm": { "100": { "checksum": "sha256:3b3f4538fdffe23885b90ece09b6859afc8a0b7f3314b9b4a60bcb9525776725", "enabled": 1 } }, "gpsd": { "100": { "checksum": "sha256:8184e98e265b9082358f87a8a715bf235f96c31008e60541b742525e7f09bce2", "enabled": 1 } }, "gssproxy": { "100": { "checksum": "sha256:a57b0a11f54bad916a170bf890b15978ad925ccc5e976d9d7b94b6c66f7c2e83", "enabled": 1 } }, "guest": { "100": { "checksum": "sha256:fc4a2c076ee26500d58559dfd29fe267a6f1ec33515064c8daa16448b7aaca9a", "enabled": 1 } }, "hostapd": { "100": { "checksum": "sha256:b13286a614402a3538fc0387f3d7abc30085c382a33e83faed9be57f33b63f45", "enabled": 1 } }, "hostname": { "100": { "checksum": "sha256:37d95ab4a25b542db931edf26632d35e3a969239ff1de338b037e2e5ec506fad", "enabled": 1 } }, "hsqldb": { "100": { "checksum": "sha256:1eab1ed96a9f87898b99be5005c598d35dc079b1ab5a7214ceb6e3e5c50f8810", "enabled": 1 } }, "hwloc": { "100": { "checksum": "sha256:6719dc568ff70220e53b2f1ed86d9a395a2f038d99901396022e4dc63d4ae868", "enabled": 1 } }, "hypervkvp": { "100": { "checksum": "sha256:c280b017518cea08d176260a60012fd4d62882dcdf6bc9fc2005c74573b2240c", "enabled": 1 } }, "ibacm": { "100": { "checksum": "sha256:a6e5ded6ba1592d16d507e4f87b6078156d99e9554184a9912a3a91819ebb5df", "enabled": 1 } }, "ica": { "100": { "checksum": "sha256:a90844f8b8a25de5abadb4887f1b1ac84367f5ae248d9213a90a39859b3e5df3", "enabled": 1 } }, "icecast": { "100": { "checksum": "sha256:40b455ce92e388b7f1eb0c65645000ae54076221c2acce0fa34c6f8d29d6ee67", "enabled": 1 } }, "iiosensorproxy": { "100": { "checksum": "sha256:392808628481e796663a1b99d1340efca31995d4832ec45fe71a939f12c117e7", "enabled": 1 } }, "inetd": { "100": { "checksum": "sha256:59557d1383fbb0a9586e18a4b129912d3ff989dbb853ed29bd0e27dfc160351d", "enabled": 1 } }, "init": { "100": { "checksum": "sha256:c850d134886113631f28665513a0536ca98fce16e53a9b3f146d1449ae9e0ee5", "enabled": 1 } }, "inn": { "100": { "checksum": "sha256:208231fcd39727d36f759dca410d8675e5852b7330f966aa86dc6e37c9abb22b", "enabled": 1 } }, "insights_client": { "100": { "checksum": "sha256:593cf420e0ac5523489f53d4b0cf2af0eaf8821d841f947349963159834a764a", "enabled": 1 } }, "iodine": { "100": { "checksum": "sha256:630a305bf2ae45b8211c97cd029f1ae4247e0a00f936d8595e3cff59570cbd5f", "enabled": 1 } }, "iotop": { "100": { "checksum": "sha256:104ca47441ca07c42c5e4770c1eae2178d2cdb880a174581032c7f846a05fb6e", "enabled": 1 } }, "ipmievd": { "100": { "checksum": "sha256:b0baf75f1edb1c27f1caf49a30874604f82791ee1b1c85c38a06195f8d806b0e", "enabled": 1 } }, "ipsec": { "100": { "checksum": "sha256:ba9aeb152542b5bd253d5a6e3b6aeff3e857615f4f42836c19098d45263fb120", "enabled": 1 } }, "iptables": { "100": { "checksum": "sha256:177e6ff2bd9b8e6800b6138497d26b5cdd005046f6c62f672ecc66701b1251c9", "enabled": 1 } }, "irc": { "100": { "checksum": "sha256:32c9122d027bf6229b8cf18a4d45fc63e38c5b0a3656312854833e4342e0e608", "enabled": 1 } }, "irqbalance": { "100": { "checksum": "sha256:42c6066d4a0751cb1db4526c055b0527a4d9403b45794571ea0dc4c71a666bec", "enabled": 1 } }, "iscsi": { "100": { "checksum": 
"sha256:997985873de7774ecab07db71db7974723494b65a569e2f852977c25d381359c", "enabled": 1 } }, "isns": { "100": { "checksum": "sha256:80496dfdf52576d83029c83097446766868b289a06aab9e9df110b733594a98e", "enabled": 1 } }, "jabber": { "100": { "checksum": "sha256:c739061ae87ecfdebea9afd0b8021aa3ea154e8e1ef00ba148c82d225ee0c8d2", "enabled": 1 } }, "jetty": { "100": { "checksum": "sha256:81d97ceabbc97f1b524d3e0e60904f5225fcc44996a83d9db67b7ef3d8b18075", "enabled": 1 } }, "jockey": { "100": { "checksum": "sha256:8eecfbe8b3b75068c3c26b6fee1cd79009098d65b962b8a847438e8c31e9d053", "enabled": 1 } }, "journalctl": { "100": { "checksum": "sha256:2ae3ef5124e180523c5f610cbd536ad55c7e0b8e7c551201c29827e59c7c1594", "enabled": 1 } }, "kafs": { "100": { "checksum": "sha256:34f943a522e251615c58df783c4ace2086a1752a3b69e5cbfef2ec5d42234da5", "enabled": 1 } }, "kdump": { "100": { "checksum": "sha256:a0a2baa7b6c1d5ed5e5582f7ffc7d5a8cf2d4e7d034f50b1f3d0972fc9674939", "enabled": 1 } }, "kdumpgui": { "100": { "checksum": "sha256:78f45331782c43239be7330f5b928d9dace6b3ebbfda5e07c1374c462fe06923", "enabled": 1 } }, "keepalived": { "100": { "checksum": "sha256:41297d28af002c4e97c864d3b5ee64f49519b4db72a71b5bf7cd104c2b05af0a", "enabled": 1 } }, "kerberos": { "100": { "checksum": "sha256:2d6c154dc940a2c178931902f7e0c0a1e9f9956055f92fc1bc92b1f2143a674d", "enabled": 1 } }, "keyboardd": { "100": { "checksum": "sha256:33d8e3fbc9f8f48ff7a69685721a782c9f8b62bbbd1878e9bafefad5bdcf51db", "enabled": 1 } }, "keystone": { "100": { "checksum": "sha256:653fca3667c90bf30da196ab61d79ee5afe1ae9703324b2512180986eec8d6c2", "enabled": 1 } }, "keyutils": { "100": { "checksum": "sha256:949cb7c7b62d17c998f63d9970d6fefbf5b3d56d65f729bf21a4f6703135e3f4", "enabled": 1 } }, "kismet": { "100": { "checksum": "sha256:c1e22e4778b465a08d815aaf53d71ba28122b061bef976f522a2304366849a2d", "enabled": 1 } }, "kpatch": { "100": { "checksum": "sha256:a308db644962bd0893fe1b8bc6571460b377f728ac28632852ca3b9c281ed74e", "enabled": 1 } }, "ksmtuned": { "100": { "checksum": "sha256:9925a9acfb6375d93a08546a581a90375ee8582972cfc9d6884204d538b895e6", "enabled": 1 } }, "ktalk": { "100": { "checksum": "sha256:0c9136b18fb83249b1dd825fd497435d852adfaddc9d618ac4d269843a458317", "enabled": 1 } }, "ktls": { "100": { "checksum": "sha256:f15a20f050208e43060eafa61f63a8e722792b76724c7f2fc44c856879ac70ae", "enabled": 1 } }, "ldap": { "100": { "checksum": "sha256:f2322f689c55de691d98651af5bfece0b87608950ccd1a92e9225cfe47415851", "enabled": 1 } }, "libraries": { "100": { "checksum": "sha256:454587674794c66f8b25f9e90154c291e81f6ab93d7c8fb3107068cfcefb797d", "enabled": 1 } }, "likewise": { "100": { "checksum": "sha256:4d05909abe38f75a72561bb28fb279f4771d6886406de5d4665111db56181972", "enabled": 1 } }, "lldpad": { "100": { "checksum": "sha256:dbd4d9d61f7e57925f7a61e0a42d65273d8be168f6e3c77b5467d7b9a93817ff", "enabled": 1 } }, "loadkeys": { "100": { "checksum": "sha256:3121357ab50a02cfc634a5fe4250aff89a1418865918569b77a10cd333cc0018", "enabled": 1 } }, "locallogin": { "100": { "checksum": "sha256:3390d25acd3ece1c7404db8c3db0f5c80278d5063fab9c8f4a8bb5584b5ded16", "enabled": 1 } }, "lockdev": { "100": { "checksum": "sha256:bc457c7839567f5943e06ec31f915742988f5e602c918a3a0d46bde5b94b6c78", "enabled": 1 } }, "logadm": { "100": { "checksum": "sha256:d369ef834c0087ca09871e4dff0128cfc8e39a97e1e3b5bd3001fd752b7af5cb", "enabled": 1 } }, "logging": { "100": { "checksum": "sha256:c739c49825488aa1ae74fd218a5718aa3c859cd1205a1ea581710fe539bfbde6", "enabled": 1 } }, "logrotate": { "100": { "checksum": 
"sha256:6a59e4d4df92e3d73d66b34035aaf00f5ca0306da24bd478c72a39c7e7844960", "enabled": 1 } }, "logwatch": { "100": { "checksum": "sha256:4196d8e4db83bd37b4e883383dfe8543fb33029b42c557fe5af7e8475b558584", "enabled": 1 } }, "lpd": { "100": { "checksum": "sha256:5427ae01212227c3a719cd1e5664c1290175bd574d7927903102147fa51989c0", "enabled": 1 } }, "lsm": { "100": { "checksum": "sha256:7d1a24bbfe8deb3a3d7aaa92bfc9c922baba1476561b92f828aae226fe9dc3c4", "enabled": 1 } }, "lvm": { "100": { "checksum": "sha256:b772895524eef04c9c79093c837e6033beff39717343d76528a8a85e4a466bb6", "enabled": 1 } }, "mailscanner": { "100": { "checksum": "sha256:5017fd004213b4ceaf374bebf74e35a0084faaf6cede37b78769036a05e34b9e", "enabled": 1 } }, "mandb": { "100": { "checksum": "sha256:7c71eef6360c66869a42a19a34ee30abc1064de8fbbcec0098d2ee57fbedb79a", "enabled": 1 } }, "mcelog": { "100": { "checksum": "sha256:cf5a647f3682f454b850317643416460ce6a7710f3f5fec6b0deac40e3c72e07", "enabled": 1 } }, "mediawiki": { "100": { "checksum": "sha256:067389c903715a12a93937a436e3df918c42a4871765668bea50eca4f02212ba", "enabled": 1 } }, "memcached": { "100": { "checksum": "sha256:6cffe11f14b5c03ba0969f0a3f476455cfac505f2cc1f2d467222a21a3ed7c5c", "enabled": 1 } }, "minissdpd": { "100": { "checksum": "sha256:1ea9c32ae0a7becd1e1879dd4c4b367d450b2721dd8fc3f771081d1568b450f5", "enabled": 1 } }, "miscfiles": { "100": { "checksum": "sha256:ea5057da646444d5450ff16e5dcb82ab338e8fd5fcf5f8dd72e782ef18ad1031", "enabled": 1 } }, "modemmanager": { "100": { "checksum": "sha256:8de073e5cf69c58d03162e50f5fe7537ac8f90c81f02d2906cb10a910a414ec7", "enabled": 1 } }, "modutils": { "100": { "checksum": "sha256:7d0336a428c29ae9a91c18857f594a16f74f5a963607fff966e7de78102ff76b", "enabled": 1 } }, "mojomojo": { "100": { "checksum": "sha256:0464738bfa038fc9ba7ce06c15abf3ff5c2113083e236dd8b96b5d85b1fb51b7", "enabled": 1 } }, "mon_statd": { "100": { "checksum": "sha256:9489c6c732b353e34ed3e5624fe8b73c336f4786c47bc30827b4a5a59b7dca44", "enabled": 1 } }, "motion": { "100": { "checksum": "sha256:660ecac63132d47b51afaeea6f55f74e3a6f25141a4d0d28065e094d7cdc6c75", "enabled": 1 } }, "mount": { "100": { "checksum": "sha256:b0a2d9c52715e340983df89e8adb304ff3790b2564659fd821843a3f172d46d0", "enabled": 1 } }, "mozilla": { "100": { "checksum": "sha256:04b77283c6d821ca98ecb58ef7bd17f6f185168786887a67f4c71cceeaa0476c", "enabled": 1 } }, "mpd": { "100": { "checksum": "sha256:ff9433431cb560a4ff03dc02129289a0f78d1909fe1f3954347f18e318c3cdc4", "enabled": 1 } }, "mptcpd": { "100": { "checksum": "sha256:dc069f3a6c78dc367c39cd7e50fe17948cf9877f3e306f090f1160b07989d503", "enabled": 1 } }, "mrtg": { "100": { "checksum": "sha256:6890958fb0f7c357a4a9600c34e21bf6fc9fd8ef36e9a5ad516b3bf2c1d88bd6", "enabled": 1 } }, "mta": { "100": { "checksum": "sha256:b61027e2a84c3f6fffbc7eb3fd40788bd9dfb036b3e04a8f77d233e10c9f2ec8", "enabled": 1 } }, "mysql": { "100": { "checksum": "sha256:e08540cc55168dd36811b1962936ffacaa21be50b15b9d5d34fa9d55dfd125d8", "enabled": 1 } }, "mythtv": { "100": { "checksum": "sha256:bd730a6479baa42060a62b9c7346dfe21ce28e1a8a432342aa5f302c2cf8ef86", "enabled": 1 } }, "namespace": { "100": { "checksum": "sha256:01131128229571749a7f5df2e65e22e9850789bfe386926cb34e91153ca9e88c", "enabled": 1 } }, "ncftool": { "100": { "checksum": "sha256:edb0f4d496b429a2b09ff9b1d74bd30126b5ee2265a4370f6e992cf9d696de0e", "enabled": 1 } }, "netlabel": { "100": { "checksum": "sha256:b28911955f6731646cd779f6b89c2255238c3e60e1b93d227ce588484694f755", "enabled": 1 } }, "netutils": { "100": { "checksum": 
"sha256:8bc2fc39e9a6cef06df178607ff3e17604e86d709575d37a60de5c1fd2b9fead", "enabled": 1 } }, "networkmanager": { "100": { "checksum": "sha256:6980bdebf1af99aa6822dc970cd6d5a5b430381aa11e96e40244db39265b5e4f", "enabled": 1 } }, "ninfod": { "100": { "checksum": "sha256:3b235676dff7abd25b2b57fa770833d05561bdd24216f4de1202e9ced52a4f4a", "enabled": 1 } }, "nis": { "100": { "checksum": "sha256:33be40fa2b50df5f7234ead34a6471ff1eea62de62445e509c28e5bc8a730364", "enabled": 1 } }, "nova": { "100": { "checksum": "sha256:0d4fd8a1f74c8e46c18a93794b305dcccf3d50e9db095b659d996712e2905dc0", "enabled": 1 } }, "nscd": { "100": { "checksum": "sha256:d4f61bea290cce978cbb1653866414f9f848bc56ee6491cf022e9131dd2ff5fe", "enabled": 1 } }, "ntop": { "100": { "checksum": "sha256:6f174abacc65b0de9248c39a31210eecb6fdbcd15ecff5bc254fb0d366f83806", "enabled": 1 } }, "numad": { "100": { "checksum": "sha256:5053d74b0f4734131234b4faf6cf7815a725bfd5b73b6acf07deb77a3cced1e2", "enabled": 1 } }, "nvme_stas": { "100": { "checksum": "sha256:0538a3f6b5c469223bfb2740d7365838eedf7ef65b89353645e9d3bf6e17253c", "enabled": 1 } }, "nx": { "100": { "checksum": "sha256:f8b11739918f67700fbef58c2ab5c87a61413acf6aa8b650a014285c0c3684e2", "enabled": 1 } }, "obex": { "100": { "checksum": "sha256:a3b7c308fe73bec0edcfceb85e1e1799927a4d7e25ec4314649b447f670a49ef", "enabled": 1 } }, "oddjob": { "100": { "checksum": "sha256:dd752acc5dc10414a4708dc0bc655d7861bfa74bb20863aa10335dacc53357ba", "enabled": 1 } }, "opafm": { "100": { "checksum": "sha256:bd4724acfb4c0ec9283595e24e29f9926c18e7af0169fd5eb344ed00de6bf393", "enabled": 1 } }, "opendnssec": { "100": { "checksum": "sha256:f1e989b744c90ee0be0978d34da65a84fdd81e5b6aef8ba116560bc157d73f0a", "enabled": 1 } }, "openhpid": { "100": { "checksum": "sha256:d2bd05813a6a5257688f9bb486a1bda49fb169eab4f16c3d503e01883c52bd11", "enabled": 1 } }, "openshift": { "100": { "checksum": "sha256:03597af2e3a916f7c4eb83e1b360b24cad9e86ce814494bd68da602991a70e7e", "enabled": 1 } }, "openshift-origin": { "100": { "checksum": "sha256:66173ad07abd0c8bb7e529350399507549601923afeca8e2ff2b0f80cb9992e3", "enabled": 1 } }, "opensm": { "100": { "checksum": "sha256:3399e9663584d6d1032992f903b7aba4f96f4f0b7a5971faf90eb816cc7655b3", "enabled": 1 } }, "openvswitch": { "100": { "checksum": "sha256:c1107cdfed17e78cabd9094b3f6aa1d9537f70bb4ddfc236983cc5fdc167e8ca", "enabled": 1 } }, "openwsman": { "100": { "checksum": "sha256:c73d5f710032819a6456d1020ef5fc8bb683aeb167b6169f56a295c31b14c72d", "enabled": 1 } }, "oracleasm": { "100": { "checksum": "sha256:d733f8dbbcdcfa398f6f139831236fa6cd0abdf132090435bb647081d2f6a785", "enabled": 1 } }, "osad": { "100": { "checksum": "sha256:44657ecdfa5bc1235f85a50222e025ac4721b24a01af6d167525f7cb0a580c31", "enabled": 1 } }, "pads": { "100": { "checksum": "sha256:92ded69a63e7ecda34b1d8ef17ffae8c9e8075046a724f8f8242f4b66d2eff19", "enabled": 1 } }, "passenger": { "100": { "checksum": "sha256:5dc833e3b3dd31a1af446c7883f6a2b92c40b9192d072ef5de2fda7ddf4f84ad", "enabled": 1 } }, "pcm": { "100": { "checksum": "sha256:924bf0bf4f0b2ea9d633ef46f55793acb2eb3da6379bacd355814507e5ddf67a", "enabled": 1 } }, "pcmcia": { "100": { "checksum": "sha256:8d6835bdf52f73dfd1acf73ce13ea8325b0bd3d0107b0ba86953fe2fbee20330", "enabled": 1 } }, "pcscd": { "100": { "checksum": "sha256:016a326cb4a747756723c0e7d675e4992e8abfd1f51a6c06aa93066bf45412ea", "enabled": 1 } }, "pegasus": { "100": { "checksum": "sha256:ee292c9774f2109ffcef5b2a1ac7ae68e44f719ba40d155f84287fe03a6c01af", "enabled": 1 } }, "permissivedomains": { "100": { 
"checksum": "sha256:2453bad4ace526f3cf2c60b358e95a5476692ef25da107b10f52f3af27c056d2", "enabled": 1 } }, "pesign": { "100": { "checksum": "sha256:5d77621f8da0f789c1b9ea9ac24925e02e0a7fe2a3a26cd7e5f46085277041bc", "enabled": 1 } }, "pkcs": { "100": { "checksum": "sha256:6cfcf3051765f61e954cd243d3b652cee14d378e4925b12569512e5ae815b40e", "enabled": 1 } }, "pki": { "100": { "checksum": "sha256:07669cb2df2c61ec4cb621f3332f77f351facaaf5232a8a72c61a5ee7bb44d71", "enabled": 1 } }, "plymouthd": { "100": { "checksum": "sha256:24e235787e311d82b99df7b41d724da0e18edc3bc6443f9f83f8d6247e33cbac", "enabled": 1 } }, "podsleuth": { "100": { "checksum": "sha256:2c0350e46ff4eb97af27f63025763c565d7097457d4cde6f46088afe7f8929e9", "enabled": 1 } }, "policykit": { "100": { "checksum": "sha256:6c7d4f4b8227aa55a5f142bbb8faef130cd10710101eb6f0aacb62547db5f49b", "enabled": 1 } }, "polipo": { "100": { "checksum": "sha256:d59109d36dd2868269eb18631e37feb5981db0aa780c55f7e0fb66d897e4f48c", "enabled": 1 } }, "portmap": { "100": { "checksum": "sha256:93a95273e16837c24572e635d58446ed1162ecbfed59695e866058df4dcbec2c", "enabled": 1 } }, "portreserve": { "100": { "checksum": "sha256:f878b2cf560b4bdff33fedf8c8f2011af390b77ee8f9416fe93ebf46153c97d0", "enabled": 1 } }, "postfix": { "100": { "checksum": "sha256:7c128725a61bd30f3e35f39b9a832e5cd3ef435dde58241616b24e28f67ffbe1", "enabled": 1 } }, "postgresql": { "100": { "checksum": "sha256:60153b9f850c92927ce2a61becd9c248ef56dc0ceb7ba990185b98eaa9b011bd", "enabled": 1 } }, "ppp": { "100": { "checksum": "sha256:ae9f1c81d0877b9f40c9d9bb5b862b7c58c73da9045f850a0a72d1b982fada35", "enabled": 1 } }, "prelink": { "100": { "checksum": "sha256:8d550f8b9e80beafd06bc1392e60ecba8e922f8d0e609fb6674de5cf27c8d772", "enabled": 1 } }, "procmail": { "100": { "checksum": "sha256:ff82ca8bf6365948aeaf3c14fbc7ea9a212074d1462a31aa676b542d0d76c882", "enabled": 1 } }, "psad": { "100": { "checksum": "sha256:664148c3f8d4a649714cdbcf15e4862a5e648e0aea83d4530d23866c78c8d8d0", "enabled": 1 } }, "ptchown": { "100": { "checksum": "sha256:d58fb38422b37d406bf3e79136e3a94a40885c08f9c1591975c9a7495b7f606d", "enabled": 1 } }, "pulseaudio": { "100": { "checksum": "sha256:8194c7df0ea3abd18f07481b0181e01c5fddb21ebb594ed5b20bc1ced555fb27", "enabled": 1 } }, "qatlib": { "100": { "checksum": "sha256:ef1377e6864d9b5049866f6f0c3986e474499f1bb0082e9430f208e2c9d84b54", "enabled": 1 } }, "qgs": { "100": { "checksum": "sha256:add48a13d9b3cc5c82c73c2ca7d72db10b074970c14e26d58b88f670f9221655", "enabled": 1 } }, "qmail": { "100": { "checksum": "sha256:c5e1779123c640fc55da0871bfd96bb124d8c9b50b9065136c025c83364f453e", "enabled": 1 } }, "qpid": { "100": { "checksum": "sha256:71a7ff78c03cde811d19a4c115de8a898007bdf437a9350d4708b3f9142481c6", "enabled": 1 } }, "quantum": { "100": { "checksum": "sha256:e66ffb20855170cda4ec60840ce05e73d69dcc54330c86b24dd89ee96bcd1d73", "enabled": 1 } }, "quota": { "100": { "checksum": "sha256:682232f167f6ecaafcb051df5557addc52b814e923f143bf37a2035fb17315ae", "enabled": 1 } }, "rabbitmq": { "100": { "checksum": "sha256:0fede9cbfe184d19e8ac7bb68a1ce8a110aa45898ca782e3c9daa5649a476fba", "enabled": 1 } }, "radius": { "100": { "checksum": "sha256:01fbaabbb5b83721fe19a813401d94510f6fb260714c3adcc40d54fbb994ef70", "enabled": 1 } }, "radvd": { "100": { "checksum": "sha256:a8e3e2b90df3917dbaf684a1bdf72432d8bf2aa6ec41233e06a2eaf02aa81686", "enabled": 1 } }, "raid": { "100": { "checksum": "sha256:8d5ee75190133ca16f3931a80ba1202b6cc171e6a3b1cba6dc5788a33bc84e0a", "enabled": 1 } }, "rasdaemon": { "100": { "checksum": 
"sha256:fdf6e82be7b620aaea9c8928edc39344d32dd9b1c4e0f78a6c6fba39bc005b6d", "enabled": 1 } }, "rdisc": { "100": { "checksum": "sha256:4788c42c425e54a8dedb4882a6a2bd2183ad72f980f4217299be830afe275069", "enabled": 1 } }, "readahead": { "100": { "checksum": "sha256:7d65968a2e3d186de718f9f6604f2cce60bd08bab6dbe0e60f60222b228a5744", "enabled": 1 } }, "realmd": { "100": { "checksum": "sha256:78d9abb7263a5c028d7065c0cadcfe14daf3b4aa064e679458f3bf271a69d2e5", "enabled": 1 } }, "redfish-finder": { "100": { "checksum": "sha256:e05fc89dc14e7a723647597786aa62adc255ca1301474ff0c29dff49e4176e4d", "enabled": 1 } }, "redis": { "100": { "checksum": "sha256:825a97c385fbcbfff670278b26a17f91bbfa8585f2219efc48781e0e510bf213", "enabled": 1 } }, "remotelogin": { "100": { "checksum": "sha256:695b31e12a82435b57e11459e99444fec8d09aba051b1a12b8efa765608dc719", "enabled": 1 } }, "restraint": { "400": { "checksum": "sha256:892885a058782b7fdfb5d86e5ec3ecca261363a14a2254652c6a7ff8a52807ae", "enabled": 1 } }, "rhcd": { "100": { "checksum": "sha256:39bc17cbd08c0377eb935fd0ca86b6542752c5ce07cb0f9d9e5d8adfe4306a13", "enabled": 1 } }, "rhcs": { "100": { "checksum": "sha256:3da6785a2c37296fb1ba2a1b621ebccc9e0837d9acf69b3442e75f3a60f2a484", "enabled": 1 } }, "rhgb": { "100": { "checksum": "sha256:912bf2ea73ebbfd1d5fefee37b336a9002345d01f8eb54cb164c28160fc4f1c1", "enabled": 1 } }, "rhnsd": { "100": { "checksum": "sha256:66b1ecc6382afc5032df2921281550af0431befd8cd517c4f8c68cab2eac0e11", "enabled": 1 } }, "rhsmcertd": { "100": { "checksum": "sha256:4ed93113b5ea0760e89533919f86cf1dd26b5587a9d7cf8bd951896fc77d7fa9", "enabled": 1 } }, "rhts": { "400": { "checksum": "sha256:008a840aa2183d0fbf1b3f3bb9542a7ba51c03a1e3a415b188ca49d2e4ed7e51", "enabled": 1 } }, "ricci": { "100": { "checksum": "sha256:3ba51ade82ac9113ee060bb118c88deccc4a7732312c57576fd72a70f40154aa", "enabled": 1 } }, "rngd": { "100": { "checksum": "sha256:b4fc4fbb8572088eb785b643f5d103d5791af96d37e6cce850d671d9291bf70f", "enabled": 1 } }, "roundup": { "100": { "checksum": "sha256:6b4e7757f0422a2c54d93e920ff7b2c5bd894d495065b3827a741a768f042b18", "enabled": 1 } }, "rpc": { "100": { "checksum": "sha256:702d5df73a6865bc249ffb537ad7a0d2388e1540716e4b2f7e844485870e37bb", "enabled": 1 } }, "rpcbind": { "100": { "checksum": "sha256:4cfda0dd9868ff0890c7a612f07c282a8cbe4a319c766d7cf842ed639fc2b34c", "enabled": 1 } }, "rpm": { "100": { "checksum": "sha256:64c59a71e1786fba000398e05773c83fbbd9f92c0341e52cbefd1386357b4e16", "enabled": 1 } }, "rrdcached": { "100": { "checksum": "sha256:2f0c18590911b20c58bbc9db0c9c0c471f4d66171f7400079a2e956366580e24", "enabled": 1 } }, "rshim": { "100": { "checksum": "sha256:f19a726a7c78ddd9aafcf8d2c4b6a57bd05fdc8450a91119e1f0d0abc09151dd", "enabled": 1 } }, "rssh": { "100": { "checksum": "sha256:b29d987a469d59767e7120202e2abad06865eaa84d3eb61d2ae6b7a78c1d6dca", "enabled": 1 } }, "rsync": { "100": { "checksum": "sha256:44e8808dad842eb55d51c204374ef445bd8515701db580d2c91f06ca9949f2f6", "enabled": 1 } }, "rtas": { "100": { "checksum": "sha256:4b1585496c5777fe140f76f11a62df0ddad219336fac090139efbc368520d38c", "enabled": 1 } }, "rtkit": { "100": { "checksum": "sha256:2a990092d1cf38541a49375e9e605d82515a34e19b9ab6b70392afb596e0c612", "enabled": 1 } }, "rwho": { "100": { "checksum": "sha256:80bda9a30a4b5ab4b6b14d7f6c92efbfd5a63658a4b44565a02c2c552cf4a28c", "enabled": 1 } }, "samba": { "100": { "checksum": "sha256:405780af5278be0dd7f89425f91ca1c48527743d2b6876bdbdcc7545d487dc09", "enabled": 1 } }, "sambagui": { "100": { "checksum": 
"sha256:f76f5b094e42967dc240e161cb187bc528f2f2a3ee2ab93c53c0b15d820c0921", "enabled": 1 } }, "sandboxX": { "100": { "checksum": "sha256:99c31c501752dfcb8460f44b4e363b9d57b85c3ad422a951f13f2d42e5f9f54b", "enabled": 1 } }, "sanlock": { "100": { "checksum": "sha256:8361387196f6c48bbed95c77561bdd324ab96356d6dd0f4874832accc67738a4", "enabled": 1 } }, "sap": { "100": { "checksum": "sha256:89169ffed763d6257769d5ed83185a9eb376145baa60dbf01b4088f37aa663bb", "enabled": 1 } }, "sasl": { "100": { "checksum": "sha256:7727a62bcf612392c76d46f3cc8c22f33c3c87c30a320805ac9844ce68409ecf", "enabled": 1 } }, "sbd": { "100": { "checksum": "sha256:1ad633f30ae0f80052b31090652780dab90b10696c098ac81ea831035a652835", "enabled": 1 } }, "sblim": { "100": { "checksum": "sha256:c9cbfb3894148ab693f0c850232f3a1b1aefe5c5cf5f4a06bc74d44cdd2b52f5", "enabled": 1 } }, "screen": { "100": { "checksum": "sha256:67b8654cf2404ad763f5343ad3ded35f198c26e99b8a9a150143911acc89ac6c", "enabled": 1 } }, "secadm": { "100": { "checksum": "sha256:6ce5485715b3caab30a72313601de971e7118bc2997a2edf6ce7b229e51c2483", "enabled": 1 } }, "sectoolm": { "100": { "checksum": "sha256:9ff7693f6fb994a0a53dc46230b7ce6c4fe6dccc2b2ec2c8ba49f7c1e3f24eea", "enabled": 1 } }, "selinuxutil": { "100": { "checksum": "sha256:c888a4b5fc698c1bf7551bfbc6d6ea7673a5f7f41d2467af7e15ce634c71e2be", "enabled": 1 } }, "sendmail": { "100": { "checksum": "sha256:1ed05c5ce069437c9de8a57326a0329d883ec753f3a11fe4f70a43ad212ec482", "enabled": 1 } }, "sensord": { "100": { "checksum": "sha256:191a531a60c27b33fadbdb48213980f03b68efec3287545eff3592fcdf4bf686", "enabled": 1 } }, "setrans": { "100": { "checksum": "sha256:e6f726edf701657c80853712b94a4bf5dd0430254d93db45804e60a243c51818", "enabled": 1 } }, "setroubleshoot": { "100": { "checksum": "sha256:8a6ef7c3d8ee76e112224e0c4e0b91572db8c85f547bbed6d7ce3f6f6d4383de", "enabled": 1 } }, "seunshare": { "100": { "checksum": "sha256:cc162915cf1fc3cc66616c3224e9e848485198a28868c237adc9d7077791cba8", "enabled": 1 } }, "shorewall": { "100": { "checksum": "sha256:74b5c41b13bd849ce82040012f557fec4b9cfad3a9072f9f17f78400868da558", "enabled": 1 } }, "slocate": { "100": { "checksum": "sha256:91acb71305dfde220ce7574e2ac67af16e6f8630639dc66d494cbf8120d2d07a", "enabled": 1 } }, "slpd": { "100": { "checksum": "sha256:9b8a5c1ff4c21846701eb5e0603cc022f4530c568db6d9fab392e41c0ed64720", "enabled": 1 } }, "slrnpull": { "100": { "checksum": "sha256:bcf004c239b72d23fb4f1e5842272bc20f287cd312ed394464db8cb9218f4377", "enabled": 1 } }, "smartmon": { "100": { "checksum": "sha256:fc3eaf23ee99b98d2ff17a5df04776e8553f490d7f57d49a24061cd49bfaa997", "enabled": 1 } }, "smoltclient": { "100": { "checksum": "sha256:17d8fa5ce4b9402dfb10ad431241cb2a5a1b2f726caa03ae7f1d7d410c2ab6ae", "enabled": 1 } }, "snapper": { "100": { "checksum": "sha256:6506687dbaf850c784d6f2af14197d3c1768514fad98e08fea69e92a780ff65f", "enabled": 1 } }, "snmp": { "100": { "checksum": "sha256:59b6f3643d2f404ef03d749628b6872fd650b5b10851862b4accad8276bc6f29", "enabled": 1 } }, "snort": { "100": { "checksum": "sha256:34b45f69552f2b284b1f6e0876e4a96d1c05c28e4ab42d2bc2a241c03fa73309", "enabled": 1 } }, "sosreport": { "100": { "checksum": "sha256:35ef9c580c4071208af6169ae1059bfee51938d36dbec2bc2354d51ed5dc505d", "enabled": 1 } }, "soundserver": { "100": { "checksum": "sha256:5594f07c04c9057b74df1612012c2515265ee04d58b11bfa46a73531b703c1f7", "enabled": 1 } }, "spamassassin": { "100": { "checksum": "sha256:b00a50f92d0e8ef2789d03756c7bee69f983edfc4a3f409304835ad25133e3a4", "enabled": 1 } }, "speech-dispatcher": 
{ "100": { "checksum": "sha256:874410d4edbbd1f73ef0e69ea40e93054a5d65cfe1556b00f6b474b928400a39", "enabled": 1 } }, "squid": { "100": { "checksum": "sha256:400e9b1c9ace97d2e43b5916b453d189a5c6f60133876f15672a48607edfd0ba", "enabled": 1 } }, "ssh": { "100": { "checksum": "sha256:66beadff1a4ed7e48b3f3cee1444f5f1aaa833d212cdc76068f2f306b8455970", "enabled": 1 } }, "sslh": { "100": { "checksum": "sha256:fd8c0b8cc073d8025ab8754b7885e0375b4e700dd3fcc921c45666829b652de5", "enabled": 1 } }, "sssd": { "100": { "checksum": "sha256:1b2a0e330daa04838742fdcd50a9b539072c58d48e949e4a3ce7933da47cbe3c", "enabled": 1 } }, "staff": { "100": { "checksum": "sha256:2ab07a8deeb7ef4cf09f94bd2ba250166a4d016bd9c581ddd470ab2784baf5e3", "enabled": 1 } }, "stalld": { "100": { "checksum": "sha256:e7caeb60df6f2002f7be4adc7a1506b6fb585e6bb9f4585381c115a90bff4a15", "enabled": 1 } }, "stapserver": { "100": { "checksum": "sha256:836d01ecc314a2b2b4eaaea69ce1e4a03f3274bd8bd25e2b64d0329e6f9d8f32", "enabled": 1 } }, "stratisd": { "100": { "checksum": "sha256:e2c86cd06c00d3ed79b9f7a602b18593d5929156df58e761a04a3cc3ba8be891", "enabled": 1 } }, "stunnel": { "100": { "checksum": "sha256:67fec37a17724a9b059f936b70c199d96906b9bbf703dd8a1670852dbfc7715f", "enabled": 1 } }, "su": { "100": { "checksum": "sha256:dd116a718e125ba88d28936b746a2292088080254134d2001084e2d252ce9379", "enabled": 1 } }, "sudo": { "100": { "checksum": "sha256:df73dbc3f1e232bb5f4d3ba0bd1850eae3c3bc401508b1819c0989b8f67f8033", "enabled": 1 } }, "svnserve": { "100": { "checksum": "sha256:2eb63b8ac8f3038eb1ff3bc18fc5923dee4ac3f609d8a14791300ae835249a9a", "enabled": 1 } }, "swift": { "100": { "checksum": "sha256:d342a188298c1fcd4df99c4235985c50ba2f02a4e53d01cef3de48bc31464ceb", "enabled": 1 } }, "switcheroo": { "100": { "checksum": "sha256:f8f67d2c990489a09a436dbd72704b13d6617fdbbb8c5c2c040a85b584de6a7b", "enabled": 1 } }, "sysadm": { "100": { "checksum": "sha256:a8f135ef10becc2a2ffd4e7faf89932ed4aff16331eb62d59e52ff2a5c0966e7", "enabled": 1 } }, "sysadm_secadm": { "100": { "checksum": "sha256:fc1ca3d8b12406dfef9f012c9275817169fbfafc411969e60d357be3b35835a8", "enabled": 1 } }, "sysnetwork": { "100": { "checksum": "sha256:ab2acab6cbf273ed7e78e577b0e2a85225adba387b1a8908b180b07adb950e6f", "enabled": 1 } }, "sysstat": { "100": { "checksum": "sha256:815d229f0b5a8f8a44cd511b5927febb002596a8aad1b85406d674e59378a0e5", "enabled": 1 } }, "systemd": { "100": { "checksum": "sha256:2a643246c63d64d4c57f3877ff3daca2637b195330920c2efd840ebade3fc20b", "enabled": 1 } }, "tangd": { "100": { "checksum": "sha256:f3896d2de3794d7dd54fea03cbebcdf4e6b63bcc512d2fc14433b3be400f4188", "enabled": 1 } }, "targetd": { "100": { "checksum": "sha256:bbfd79953db88f6db10739803d29b003d83311a21c75604d64ed9fae26da541a", "enabled": 1 } }, "telepathy": { "100": { "checksum": "sha256:71c6423e6318342438fea1ba8a38751b5741b4482ca8ed075dbdd36bc6fda9aa", "enabled": 1 } }, "telnet": { "100": { "checksum": "sha256:f482585c8f26517c6ed8e9203bec4adadec8ebc65840089d7483e31ee24fa679", "enabled": 1 } }, "tftp": { "100": { "checksum": "sha256:a5312c216b56620ca8e69679e99275e793b3de9b6e524db1a5678d22b9909056", "enabled": 1 } }, "tgtd": { "100": { "checksum": "sha256:3a4e10afbea76bb0a825f3e10b6be09c1e380f19737aef7a6171a9744c15b33f", "enabled": 1 } }, "thin": { "100": { "checksum": "sha256:58aac19837bee6fd1c5e3d1e2a9c9900c56b9aff34b643fa9d958399152afbce", "enabled": 1 } }, "thumb": { "100": { "checksum": "sha256:46f7b10654f710546a61324618f68b753849ea0b6a7e11f431922a5c848fae89", "enabled": 1 } }, "tmpreaper": { "100": { 
"checksum": "sha256:f3d5b0012a6f6d0255e831f608cf0d77f1af38a975b222a7f71cf0821f359246", "enabled": 1 } }, "tomcat": { "100": { "checksum": "sha256:2d749a0f3d39317412feb3388eec0eacb60859891ea7da50373271f03ab66c5a", "enabled": 1 } }, "tuned": { "100": { "checksum": "sha256:5b1a3e31fee719423530b8c7c07b6649ab539d38f2b446a3e6d3f029a65696ae", "enabled": 1 } }, "tvtime": { "100": { "checksum": "sha256:561814e9fa4d9ffa1be3bcc8e27ee1a50260293a17de3db6eb9d4a83e14e8faf", "enabled": 1 } }, "udev": { "100": { "checksum": "sha256:48fac9542e02d0c8f461e03905339795331b4fcb2082e830e83189e50af59040", "enabled": 1 } }, "ulogd": { "100": { "checksum": "sha256:80d84cb83923e4d5d6b9870b4311a67c87609f010c5ffcdcb00ef6e926a8d785", "enabled": 1 } }, "uml": { "100": { "checksum": "sha256:33a8bba7a36dc094b6220c0dfe282a9e57ff280511965c99d654f4e584f960f0", "enabled": 1 } }, "unconfined": { "100": { "checksum": "sha256:38e42ce3f0baba47216f3b50d7bec9ac531a11d659c8807d0bb43b5e5b4ce873", "enabled": 1 } }, "unconfineduser": { "100": { "checksum": "sha256:e9267049c61e87edd481214c8cedfc02cb396789c52a150b58d8fbf0401bd455", "enabled": 1 } }, "unlabelednet": { "100": { "checksum": "sha256:2f55ef3a5145328ed09f316753cec5b85f67c1b43902be5152fc57c4b95c3026", "enabled": 1 } }, "unprivuser": { "100": { "checksum": "sha256:51ec0952bf860ec23e3bfdfd53f3bfad841a4e5b560cc25a9548c9b207504194", "enabled": 1 } }, "updfstab": { "100": { "checksum": "sha256:ef06a218a285a5a01a1e354d6a40f826815203dc323d00ad68e29f85162c24e7", "enabled": 1 } }, "usbmodules": { "100": { "checksum": "sha256:f71781a997aa0d0df5c9baa600b6212105c75cc290bf634a198ed0d5b42a668d", "enabled": 1 } }, "usbmuxd": { "100": { "checksum": "sha256:f58eadcb76889082e3a109afa993bc7eeed39675991d171a13744bc8b61c279a", "enabled": 1 } }, "userdomain": { "100": { "checksum": "sha256:4b8e317234ae08c1f4a80133c8abba35d412f5797db3c4515d0cf051c35af6bd", "enabled": 1 } }, "userhelper": { "100": { "checksum": "sha256:3c2a65084450b2459115a69bb1d382e452a1da63080ac7fdc85bcac36affe1c7", "enabled": 1 } }, "usermanage": { "100": { "checksum": "sha256:ca220cb87bf9790b38738b6f08cc800a2fd0e083960aa4770c9385b897cd31cd", "enabled": 1 } }, "usernetctl": { "100": { "checksum": "sha256:cfcecf645d2d8a59f98135435d535133a39f70f46d9b47a65b15e88a3805861a", "enabled": 1 } }, "uucp": { "100": { "checksum": "sha256:91a33317bdd39510dd305d768e2791d08b207d8384bfca22322ec49f5b26f9bd", "enabled": 1 } }, "uuidd": { "100": { "checksum": "sha256:c500e8df08994b81cc1d743db684060d03bfe4465fc12eea9a4af83a69af307b", "enabled": 1 } }, "varnishd": { "100": { "checksum": "sha256:db1d0917d263b447f9a744edfd4ebfeca697182c853295c7eaf49f1270218858", "enabled": 1 } }, "vdagent": { "100": { "checksum": "sha256:84679e67832759be8220885abe3fa0157305fc8f50efa604b1343e99907925dc", "enabled": 1 } }, "vhostmd": { "100": { "checksum": "sha256:5ca3d53e3b62d5973442d210faf9b9f5f9b5f4935a74074ce4b18836c8d78b19", "enabled": 1 } }, "virt": { "100": { "checksum": "sha256:d8fadd99af0d343c815f006330529911a5106641ed9c7d22a2eb72e0d9d55d2d", "enabled": 1 } }, "virt_supplementary": { "100": { "checksum": "sha256:664ab4aa1e1eca422d2c627a22a9631ac348221893713bd9a4d97a628094b1b0", "enabled": 1 } }, "vlock": { "100": { "checksum": "sha256:e68a71817476b5ebb8ae2e13e9ea9418a31dd64ffe4e156258cb77029635cefa", "enabled": 1 } }, "vmtools": { "100": { "checksum": "sha256:f45c6d89a3305814e44a05c0d8c8f8a4ce8a923d721e83c9579f76d8d8cd909d", "enabled": 1 } }, "vmware": { "100": { "checksum": "sha256:8d828eef8065f2486b815aea04ed491419e3bf17508cf0ce595fca71f872ba38", "enabled": 1 } 
}, "w3c": { "100": { "checksum": "sha256:76a11dd14f578f940e874ab4d68ca1370ddfcb2585b6a3a955569fadb77d269f", "enabled": 1 } }, "watchdog": { "100": { "checksum": "sha256:17759c6e3a6229e4a40be0b8121751d768f00fd6ea0a872f4fe65bebe2280b30", "enabled": 1 } }, "wdmd": { "100": { "checksum": "sha256:c9c26249a11c4bace4efa998ae826c3cd5178a19d323886a62b7e355ca3d8260", "enabled": 1 } }, "webadm": { "100": { "checksum": "sha256:ea826918681193d37db69c814ee4c753fef3fcca809cd0fad6f924f829eeb9eb", "enabled": 1 } }, "webalizer": { "100": { "checksum": "sha256:a9e221f7f656f9f0b4937c2bd0f7b93124c7f48f4c88fe8ba608db1eaa5f05d1", "enabled": 1 } }, "wine": { "100": { "checksum": "sha256:034bceb856cf79ac9329a4affb6cc53cf29c5bebb089c0ddd486a76148812b89", "enabled": 1 } }, "wireguard": { "100": { "checksum": "sha256:ea40fa389e6fc510f40994b9b4272a6b985c80064b8a4d702d5813d5252487f5", "enabled": 1 } }, "wireshark": { "100": { "checksum": "sha256:308910f855a076bdf38241880815f6640dfba4b21ef1be58112deec3ed858d16", "enabled": 1 } }, "xen": { "100": { "checksum": "sha256:dd07546e8a114e1b7f5056d4c5b0f1256050fe93e867fbbb6c5f52d2c6f77ec6", "enabled": 1 } }, "xguest": { "100": { "checksum": "sha256:870a818c9c3a4e4d24386bfc3fc7565af1c8aeec605b3d4cd819169172bb3e03", "enabled": 1 } }, "xserver": { "100": { "checksum": "sha256:476c08aa43723ad6bb98a7254bc6cdad6ddab4aa63336719c192bbf6f5ba6700", "enabled": 1 } }, "zarafa": { "100": { "checksum": "sha256:e27315e58a548c06561117f2dcf86c67e6937dc1ef2071ee612975457091e40c", "enabled": 1 } }, "zoneminder": { "100": { "checksum": "sha256:a077f44cc6d16684de9a93061ee0f7b212e3f729fdbdf594dee573fe5c30817d", "enabled": 1 } }, "zosremote": { "100": { "checksum": "sha256:8228eda847eeaa7529b089edb8c64763d03100e84117526a67fbb41ea006a2b0", "enabled": 1 } } }, "selinux_priorities": true }, "changed": false } TASK [fedora.linux_system_roles.selinux : Load SELinux modules] **************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:170 Saturday 28 March 2026 19:32:10 -0400 (0:00:00.108) 0:09:45.113 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "selinux_modules is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Restore SELinux labels on filesystem tree] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:183 Saturday 28 March 2026 19:32:10 -0400 (0:00:00.029) 0:09:45.143 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Restore SELinux labels on filesystem tree in check mode] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:191 Saturday 28 March 2026 19:32:10 -0400 (0:00:00.020) 0:09:45.163 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.nbde_server : Stat the tangd custom port systemd directory] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:14 Saturday 28 March 2026 19:32:10 -0400 (0:00:00.024) 0:09:45.188 ******** ok: [managed-node12] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.nbde_server : Get a list of files in the tangd custom directory] *** task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:19 Saturday 28 March 2026 19:32:10 -0400 (0:00:00.370) 0:09:45.558 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "__nbde_server_tangd_dir_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.nbde_server : Manage tangd custom port systemd directory] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:35 Saturday 28 March 2026 19:32:10 -0400 (0:00:00.025) 0:09:45.583 ******** changed: [managed-node12] => { "changed": true, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/systemd/system/tangd.socket.d", "secontext": "unconfined_u:object_r:systemd_unit_file_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.nbde_server : Creates the file with the port entry that we want tangd to listen to] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:44 Saturday 28 March 2026 19:32:10 -0400 (0:00:00.511) 0:09:46.095 ******** changed: [managed-node12] => { "changed": true, "checksum": "cab519df8c21e60fd06ac780e2c7bd41ad441042", "dest": "/etc/systemd/system/tangd.socket.d/override.conf", "gid": 0, "group": "root", "md5sum": "fc727969e0bd264a9cc7f9c6bc56714c", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:tangd_unit_file_t:s0", "size": 90, "src": "/root/.ansible/tmp/ansible-tmp-1774740731.0499904-237431-125855065496509/.source.conf", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.nbde_server : Set flag to to tell main that the port has changed] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:53 Saturday 28 March 2026 19:32:11 -0400 (0:00:00.875) 0:09:46.970 ******** ok: [managed-node12] => { "ansible_facts": { "__nbde_server_port_changed": true }, "changed": false } TASK [Ensure the desired port is added to firewalld] *************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:57 Saturday 28 March 2026 19:32:11 -0400 (0:00:00.117) 0:09:47.088 ******** included: fedora.linux_system_roles.firewall for managed-node12 TASK [fedora.linux_system_roles.firewall : Set platform/version specific variables] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:2 Saturday 28 March 2026 19:32:12 -0400 (0:00:00.233) 0:09:47.321 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/set_vars.yml for managed-node12 TASK [fedora.linux_system_roles.firewall : Ensure ansible_facts used by role] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/set_vars.yml:2 Saturday 28 March 2026 19:32:12 -0400 (0:00:00.066) 0:09:47.388 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "__firewall_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check if system is ostree] ********** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/set_vars.yml:10 Saturday 28 March 2026 19:32:12 -0400 (0:00:00.052) 
0:09:47.441 ******** ok: [managed-node12] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.firewall : Set flag to indicate system is ostree] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/set_vars.yml:15 Saturday 28 March 2026 19:32:12 -0400 (0:00:00.401) 0:09:47.842 ******** ok: [managed-node12] => { "ansible_facts": { "__firewall_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.firewall : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/set_vars.yml:22 Saturday 28 March 2026 19:32:12 -0400 (0:00:00.055) 0:09:47.897 ******** ok: [managed-node12] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.firewall : Set flag if transactional-update exists] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/set_vars.yml:27 Saturday 28 March 2026 19:32:13 -0400 (0:00:00.409) 0:09:48.307 ******** ok: [managed-node12] => { "ansible_facts": { "__firewall_is_transactional": false }, "changed": false } TASK [fedora.linux_system_roles.firewall : Set platform/version specific variables] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/set_vars.yml:31 Saturday 28 March 2026 19:32:13 -0400 (0:00:00.029) 0:09:48.337 ******** skipping: [managed-node12] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node12] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node12] => (item=CentOS_10.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS_10.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node12] => (item=CentOS_10.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS_10.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node12] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Setup firewalld] ******************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:5 Saturday 28 March 2026 19:32:13 -0400 (0:00:00.085) 0:09:48.422 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml for managed-node12 TASK [fedora.linux_system_roles.firewall : Run systemctl] ********************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:5 Saturday 28 March 2026 19:32:13 -0400 (0:00:00.108) 0:09:48.530 ******** ok: [managed-node12] => { "changed": false, "cmd": [ "systemctl", "is-system-running" ], "delta": "0:00:00.009668", "end": "2026-03-28 19:32:13.842189", "failed_when_result": false, "rc": 0, "start": "2026-03-28 19:32:13.832521" } STDOUT: running TASK [fedora.linux_system_roles.firewall : Require installed systemd] ********** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:12 Saturday 28 March 2026 19:32:13 -0400 
(0:00:00.470) 0:09:49.001 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "\"No such file or directory\" in __is_system_running.msg | d(\"\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Set flag to indicate that systemd runtime operations are available] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:17 Saturday 28 March 2026 19:32:13 -0400 (0:00:00.033) 0:09:49.035 ******** ok: [managed-node12] => { "ansible_facts": { "__firewall_is_booted": true }, "changed": false } TASK [fedora.linux_system_roles.firewall : Install firewalld] ****************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:22 Saturday 28 March 2026 19:32:13 -0400 (0:00:00.034) 0:09:49.069 ******** ok: [managed-node12] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: firewalld TASK [fedora.linux_system_roles.firewall : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:34 Saturday 28 March 2026 19:32:14 -0400 (0:00:00.851) 0:09:49.920 ******** skipping: [managed-node12] => { "false_condition": "__firewall_is_transactional | d(false)" } TASK [fedora.linux_system_roles.firewall : Reboot transactional update systems] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:39 Saturday 28 March 2026 19:32:14 -0400 (0:00:00.045) 0:09:49.966 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Fail if reboot is needed and not set] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:44 Saturday 28 March 2026 19:32:14 -0400 (0:00:00.047) 0:09:50.013 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check which conflicting services are enabled] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:8 Saturday 28 March 2026 19:32:14 -0400 (0:00:00.050) 0:09:50.063 ******** skipping: [managed-node12] => (item=nftables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "nftables", "skip_reason": "Conditional result was False" } skipping: [managed-node12] => (item=iptables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "iptables", "skip_reason": "Conditional result was False" } skipping: [managed-node12] => (item=ufw) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "ufw", "skip_reason": "Conditional result was False" } skipping: [managed-node12] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:17 Saturday 28 March 2026 19:32:15 -0400 (0:00:00.070) 
0:09:50.133 ******** skipping: [managed-node12] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', 'false_condition': 'firewall_disable_conflicting_services | bool', 'item': 'nftables', 'ansible_loop_var': 'item'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "nftables", "skip_reason": "Conditional result was False", "skipped": true }, "skip_reason": "Conditional result was False" } skipping: [managed-node12] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', 'false_condition': 'firewall_disable_conflicting_services | bool', 'item': 'iptables', 'ansible_loop_var': 'item'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "iptables", "skip_reason": "Conditional result was False", "skipped": true }, "skip_reason": "Conditional result was False" } skipping: [managed-node12] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', 'false_condition': 'firewall_disable_conflicting_services | bool', 'item': 'ufw', 'ansible_loop_var': 'item'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "ufw", "skip_reason": "Conditional result was False", "skipped": true }, "skip_reason": "Conditional result was False" } skipping: [managed-node12] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Unmask firewalld service] *********** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:27 Saturday 28 March 2026 19:32:15 -0400 (0:00:00.065) 0:09:50.198 ******** ok: [managed-node12] => { "changed": false, "name": "firewalld", "status": { "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "dbus-broker.service basic.target polkit.service system.slice sysinit.target dbus.socket", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target network-pre.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "yes", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin 
cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target ebtables.service ipset.service iptables.service ip6tables.service", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DeviceAllow": "char-rtc r", "DevicePolicy": "closed", "Documentation": "\"man:firewalld(1)\"", "DynamicUser": "no", "EffectiveMemoryHigh": "3630931968", "EffectiveMemoryMax": "3630931968", "EffectiveTasksMax": "21802", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", 
"LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13626", "LimitNPROCSoft": "13626", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13626", "LimitSIGPENDINGSoft": "13626", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "yes", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3123310592", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "yes", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "firewalld.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "yes", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "yes", "ProtectControlGroups": "yes", "ProtectControlGroupsEx": "yes", "ProtectHome": "tmpfs", "ProtectHostname": "yes", "ProtectKernelLogs": "yes", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "yes", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice sysinit.target dbus-broker.service dbus.socket", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "yes", "RestrictSUIDSGID": "yes", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", 
"Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallArchitectures": "native", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "21802", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "disabled", "UtmpMode": "init", "Wants": "network-pre.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.firewall : Enable and start firewalld service] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:33 Saturday 28 March 2026 19:32:15 -0400 (0:00:00.621) 0:09:50.820 ******** changed: [managed-node12] => { "changed": true, "enabled": true, "name": "firewalld", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "dbus.socket system.slice dbus-broker.service sysinit.target polkit.service basic.target", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target network-pre.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "yes", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": 
"ipset.service ebtables.service iptables.service ip6tables.service shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DeviceAllow": "char-rtc r", "DevicePolicy": "closed", "Documentation": "\"man:firewalld(1)\"", "DynamicUser": "no", "EffectiveMemoryHigh": "3630931968", "EffectiveMemoryMax": "3630931968", "EffectiveTasksMax": "21802", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13626", "LimitNPROCSoft": "13626", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13626", 
"LimitSIGPENDINGSoft": "13626", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "yes", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3124637696", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "yes", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "firewalld.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "yes", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "yes", "ProtectControlGroups": "yes", "ProtectControlGroupsEx": "yes", "ProtectHome": "tmpfs", "ProtectHostname": "yes", "ProtectKernelLogs": "yes", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "yes", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice dbus-broker.service sysinit.target dbus.socket", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "yes", "RestrictSUIDSGID": "yes", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", 
"StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallArchitectures": "native", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "21802", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "disabled", "UtmpMode": "init", "Wants": "network-pre.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.firewall : Check if previous replaced is defined] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:39 Saturday 28 March 2026 19:32:18 -0400 (0:00:02.721) 0:09:53.541 ******** ok: [managed-node12] => { "ansible_facts": { "__firewall_previous_replaced": false, "__firewall_python_cmd": "/usr/bin/python3.12", "__firewall_report_changed": true }, "changed": false } TASK [fedora.linux_system_roles.firewall : Get config files, checksums before and remove] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:48 Saturday 28 March 2026 19:32:18 -0400 (0:00:00.108) 0:09:53.649 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Tell firewall module it is able to report changed] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:61 Saturday 28 March 2026 19:32:18 -0400 (0:00:00.222) 0:09:53.872 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Configure firewall] ***************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:77 Saturday 28 March 2026 19:32:18 -0400 (0:00:00.071) 0:09:53.943 ******** changed: [managed-node12] => (item={'port': '7500/tcp', 'zone': 'public', 'state': 'enabled', 'immediate': True, 'permanent': True}) => { "__firewall_changed": true, "ansible_loop_var": "item", "changed": true, "item": { "immediate": true, "permanent": true, "port": "7500/tcp", "state": "enabled", "zone": "public" } } TASK [fedora.linux_system_roles.firewall : Gather firewall config information] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:130 Saturday 28 March 2026 19:32:19 -0400 (0:00:00.894) 0:09:54.838 ******** skipping: [managed-node12] => (item={'port': '7500/tcp', 'zone': 'public', 'state': 'enabled', 'immediate': True, 'permanent': True}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "'detailed' in fw[0]", "item": { "immediate": true, 
"permanent": true, "port": "7500/tcp", "state": "enabled", "zone": "public" }, "skip_reason": "Conditional result was False" } skipping: [managed-node12] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:141 Saturday 28 March 2026 19:32:19 -0400 (0:00:00.148) 0:09:54.987 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "'detailed' in fw[0]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Gather firewall config if no arguments] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:150 Saturday 28 March 2026 19:32:20 -0400 (0:00:00.116) 0:09:55.103 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:156 Saturday 28 March 2026 19:32:20 -0400 (0:00:00.099) 0:09:55.203 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Get config files, checksums after] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:165 Saturday 28 March 2026 19:32:20 -0400 (0:00:00.079) 0:09:55.282 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Calculate what has changed] ********* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:176 Saturday 28 March 2026 19:32:20 -0400 (0:00:00.066) 0:09:55.349 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Show diffs] ************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:182 Saturday 28 March 2026 19:32:20 -0400 (0:00:00.055) 0:09:55.405 ******** skipping: [managed-node12] => { "false_condition": "__firewall_previous_replaced | bool" } TASK [fedora.linux_system_roles.nbde_server : Reload the daemons so the new changes take effect] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:34 Saturday 28 March 2026 19:32:20 -0400 (0:00:00.075) 0:09:55.481 ******** ok: [managed-node12] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.nbde_server : Ensure required services are enabled and at the right state] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:41 Saturday 28 March 2026 19:32:21 -0400 (0:00:00.909) 0:09:56.390 ******** changed: [managed-node12] => (item=tangd.socket) => { "ansible_loop_var": "item", "changed": true, "enabled": true, "item": "tangd.socket", "name": "tangd.socket", "state": "started", "status": { "Accept": "yes", 
"AccessSELinuxContext": "system_u:object_r:tangd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "system.slice sysinit.target systemd-journald.socket", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Backlog": "2147483647", "Before": "sockets.target shutdown.target", "BindIPv6Only": "default", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "Broadcast": "no", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "no", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "DeferAcceptUSec": "0", "Delegate": "no", "Description": "Tang Server socket", "DevicePolicy": "auto", "DirectoryMode": "0755", "Documentation": "\"man:tang(8)\"", "DropInPaths": "/etc/systemd/system/tangd.socket.d/override.conf", "DynamicUser": "no", "EffectiveMemoryHigh": "3630931968", "EffectiveMemoryMax": "3630931968", "EffectiveTasksMax": "21802", "ExecStartPre": "{ path=/usr/bin/chown ; argv[]=/usr/bin/chown -R tang:tang /var/db/tang ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorName": "connection", "FinalKillSignal": "9", "FlushPending": "no", "FragmentPath": "/usr/lib/systemd/system/tangd.socket", "FreeBind": "no", "FreezerState": "running", "GID": "[not set]", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "IPTOS": "-1", "IPTTL": "-1", "Id": "tangd.socket", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": 
"infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeepAlive": "no", "KeepAliveIntervalUSec": "0", "KeepAliveProbes": "0", "KeepAliveTimeUSec": "0", "KeyringMode": "shared", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13626", "LimitNPROCSoft": "13626", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13626", "LimitSIGPENDINGSoft": "13626", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "Listen": "[::]:7500 (Stream)", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "Mark": "-1", "MaxConnections": "64", "MaxConnectionsPerSource": "0", "MemoryAccounting": "yes", "MemoryAvailable": "3150188544", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MessageQueueMaxMessages": "0", "MessageQueueMessageSize": "0", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NAccepted": "0", "NConnections": "0", "NRefused": "0", "NUMAPolicy": "n/a", "Names": "tangd.socket", "NeedDaemonReload": "no", "Nice": "0", "NoDelay": "no", "NoNewPrivileges": "no", "NonBlocking": "no", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "PassCredentials": "no", "PassFileDescriptorsToExec": "no", "PassPacketInfo": "no", "PassSecurity": "no", "Perpetual": "no", "PipeSize": "0", "PollLimitBurst": "150", "PollLimitIntervalUSec": "2s", "Priority": "-1", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "ReceiveBuffer": "0", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemoveIPC": "no", "RemoveOnStop": "no", "Requires": "sysinit.target 
system.slice", "RestartKillSignal": "15", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "ReusePort": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "SameProcessGroup": "no", "SecureBits": "0", "SendBuffer": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SocketMode": "0666", "SocketProtocol": "0", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "21802", "TimeoutCleanUSec": "infinity", "TimeoutUSec": "1min 30s", "TimerSlackNSec": "50000", "Timestamping": "off", "Transient": "no", "Transparent": "no", "TriggerLimitBurst": "200", "TriggerLimitIntervalUSec": "2s", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "disabled", "UtmpMode": "init", "WatchdogSignal": "6", "Writable": "no" } } TASK [Create encrypted Stratis pool with Clevis/Tang] ************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:244 Saturday 28 March 2026 19:32:22 -0400 (0:00:01.086) 0:09:57.477 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml for managed-node12 TASK [Clear facts] ************************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml:10 Saturday 28 March 2026 19:32:22 -0400 (0:00:00.070) 0:09:57.547 ******** META: facts cleared TASK [Run the role] ************************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml:24 Saturday 28 March 2026 19:32:22 -0400 (0:00:00.014) 0:09:57.562 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "__sr_failed_when is defined", "skip_reason": "Conditional result was False" } TASK [Run the role normally] *************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml:34 Saturday 28 March 2026 19:32:22 -0400 (0:00:00.051) 0:09:57.614 ******** included: fedora.linux_system_roles.storage for managed-node12 TASK [fedora.linux_system_roles.storage : Set platform/version 
specific variables] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Saturday 28 March 2026 19:32:22 -0400 (0:00:00.091) 0:09:57.705 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node12 TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Saturday 28 March 2026 19:32:22 -0400 (0:00:00.068) 0:09:57.773 ******** ok: [managed-node12] TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Saturday 28 March 2026 19:32:23 -0400 (0:00:01.111) 0:09:58.885 ******** skipping: [managed-node12] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node12] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node12] => (item=CentOS_10.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-fs", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "xfsprogs", "stratisd", "stratis-cli", "{{ 'libblockdev-s390' if ansible_facts['architecture'] == 's390x' else 'libblockdev' }}", "vdo" ] }, "ansible_included_var_files": [ "/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node12] => (item=CentOS_10.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-fs", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "xfsprogs", "stratisd", "stratis-cli", "{{ 'libblockdev-s390' if ansible_facts['architecture'] == 's390x' else 'libblockdev' }}", "vdo" ] }, "ansible_included_var_files": [ "/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Saturday 28 March 2026 19:32:23 -0400 (0:00:00.138) 0:09:59.023 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "not __storage_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Saturday 28 March 2026 19:32:23 -0400 (0:00:00.036) 0:09:59.059 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "not __storage_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Saturday 28 
March 2026 19:32:24 -0400 (0:00:00.041) 0:09:59.101 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Saturday 28 March 2026 19:32:24 -0400 (0:00:00.040) 0:09:59.142 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Saturday 28 March 2026 19:32:24 -0400 (0:00:00.056) 0:09:59.198 ******** redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node12 TASK [fedora.linux_system_roles.storage : Add repo key] ************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Saturday 28 March 2026 19:32:24 -0400 (0:00:00.173) 0:09:59.372 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_blivet_custom_repo.key is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Add blivet repo] ********************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:15 Saturday 28 March 2026 19:32:24 -0400 (0:00:00.035) 0:09:59.407 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_blivet_custom_repo.key is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:20 Saturday 28 March 2026 19:32:24 -0400 (0:00:00.031) 0:09:59.438 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:27 Saturday 28 March 2026 19:32:24 -0400 (0:00:00.041) 0:09:59.480 ******** ok: [managed-node12] => { "storage_pools | d([])": [ { "disks": "sda", "encryption": true, "encryption_clevis_pin": "tang", "encryption_password": "yabbadabbadoo", "encryption_tang_url": "localhost:7500", "name": "foo", "type": "stratis" } ] } TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:32 Saturday 28 March 2026 19:32:24 -0400 (0:00:00.037) 0:09:59.517 ******** ok: [managed-node12] => { "storage_volumes | d([])": [] } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37 Saturday 28 March 2026 19:32:24 -0400 (0:00:00.030) 0:09:59.548 ******** skipping: [managed-node12] => { 
"changed": false, "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:50 Saturday 28 March 2026 19:32:24 -0400 (0:00:00.031) 0:09:59.579 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:56 Saturday 28 March 2026 19:32:24 -0400 (0:00:00.026) 0:09:59.606 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:76 Saturday 28 March 2026 19:32:24 -0400 (0:00:00.027) 0:09:59.634 ******** ok: [managed-node12] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "apt-daily.service": { "name": "apt-daily.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": 
"enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.fedoraproject.FirewallD1.service": { "name": "dbus-org.fedoraproject.FirewallD1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" 
}, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fcoe.service": { "name": "fcoe.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "running", "status": "enabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ipset.service": { "name": "ipset.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "iscsi-shutdown.service": { "name": "iscsi-shutdown.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsi.service": { "name": "iscsi.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsid.service": { "name": "iscsid.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", 
"status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-activation-early.service": { "name": "lvm2-activation-early.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_multipath.service": { "name": "modprobe@dm_multipath.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", 
"status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "raid-check.service": { "name": "raid-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "rbdmap.service": { "name": "rbdmap.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "stopped", 
"status": "enabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-unix-local@.service": { "name": "sshd-unix-local@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd-vsock@.service": { "name": "sshd-vsock@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "stratis-fstab-setup-with-network@.service": { "name": "stratis-fstab-setup-with-network@.service", "source": "systemd", "state": "unknown", "status": "static" }, "stratis-fstab-setup@.service": { "name": "stratis-fstab-setup@.service", "source": "systemd", "state": "unknown", "status": "static" }, "stratisd-min-postinitrd.service": { "name": "stratisd-min-postinitrd.service", "source": "systemd", "state": "inactive", "status": "static" 
}, "stratisd.service": { "name": "stratisd.service", "source": "systemd", "state": "running", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": 
"systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": 
"inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, 
"systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-userdbd.service": { "name": "systemd-userdbd.service", "source": "systemd", "state": "running", "status": "indirect" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "tangd@.service": { "name": "tangd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:82 
Saturday 28 March 2026 19:32:26 -0400 (0:00:02.094) 0:10:01.728 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:88 Saturday 28 March 2026 19:32:26 -0400 (0:00:00.061) 0:10:01.790 ******** changed: [managed-node12] => { "actions": [ { "action": "create format", "device": "/dev/sda", "fs_type": "stratis" }, { "action": "create device", "device": "/dev/stratis/foo", "fs_type": null } ], "changed": true, "crypts": [], "leaves": [ "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/xvda2", "/dev/stratis/foo" ], "mounts": [], "packages": [ "stratisd", "stratis-cli", "xfsprogs" ], "pools": [ { "disks": [ "sda" ], "encryption": true, "encryption_cipher": null, "encryption_clevis_pin": "tang", "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "encryption_tang_thumbprint": null, "encryption_tang_url": "localhost:7500", "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "stratis", "volumes": [] } ], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:103 Saturday 28 March 2026 19:32:43 -0400 (0:00:16.995) 0:10:18.785 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_udevadm_trigger | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ****** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:110 Saturday 28 March 2026 19:32:43 -0400 (0:00:00.068) 0:10:18.853 ******** ok: [managed-node12] => { "changed": false, "stat": { "atime": 1774740645.8607492, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "6aeccfbb3223a634b983c3c21792c1ba90809bb8", "ctime": 1774740641.1286864, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 230686985, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1774740641.1286864, "nlink": 1, "path": "/etc/fstab", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1408, "uid": 0, "version": "1679064280", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:115 Saturday 28 March 2026 19:32:44 -0400 (0:00:00.515) 0:10:19.369 ******** ok: [managed-node12] => { "backup": "", "changed": false } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:133 Saturday 28 March 2026 19:32:44 -0400 (0:00:00.484) 0:10:19.853 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:139 Saturday 28 March 2026 19:32:44 -0400 (0:00:00.051) 0:10:19.904 ******** ok: [managed-node12] => { "blivet_output": { "actions": [ { "action": "create format", "device": "/dev/sda", "fs_type": "stratis" }, { "action": "create device", "device": "/dev/stratis/foo", "fs_type": null } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/xvda2", "/dev/stratis/foo" ], "mounts": [], "packages": [ "stratisd", "stratis-cli", "xfsprogs" ], "pools": [ { "disks": [ "sda" ], "encryption": true, "encryption_cipher": null, "encryption_clevis_pin": "tang", "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "encryption_tang_thumbprint": null, "encryption_tang_url": "localhost:7500", "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "stratis", "volumes": [] } ], "volumes": [] } } TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148 Saturday 28 March 2026 19:32:44 -0400 (0:00:00.037) 0:10:19.942 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": true, "encryption_cipher": null, "encryption_clevis_pin": "tang", "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "encryption_tang_thumbprint": null, "encryption_tang_url": "localhost:7500", "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "stratis", "volumes": [] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:152 Saturday 28 March 2026 19:32:44 -0400 (0:00:00.044) 0:10:19.988 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] ************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:168 Saturday 28 March 2026 19:32:44 -0400 (0:00:00.053) 0:10:20.041 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:179 Saturday 28 March 2026 
19:32:44 -0400 (0:00:00.041) 0:10:20.083 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "blivet_output['mounts'] | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set up new/current mounts] *********** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:184 Saturday 28 March 2026 19:32:45 -0400 (0:00:00.030) 0:10:20.113 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195 Saturday 28 March 2026 19:32:45 -0400 (0:00:00.027) 0:10:20.141 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:207 Saturday 28 March 2026 19:32:45 -0400 (0:00:00.030) 0:10:20.171 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "blivet_output['mounts'] | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:215 Saturday 28 March 2026 19:32:45 -0400 (0:00:00.027) 0:10:20.198 ******** ok: [managed-node12] => { "changed": false, "stat": { "atime": 1774737762.6954868, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1774342704.475, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 4194435, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1774342323.629, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "1224473831", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:220 Saturday 28 March 2026 19:32:45 -0400 (0:00:00.400) 0:10:20.599 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:242 Saturday 28 March 2026 19:32:45 -0400 (0:00:00.035) 0:10:20.635 ******** ok: [managed-node12] TASK [Verify role results - 11] ************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:256 Saturday 28 March 2026 19:32:46 -0400 (0:00:01.060) 0:10:21.695 ******** included: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node12 TASK [Print out pool information] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2 Saturday 28 March 2026 19:32:46 -0400 (0:00:00.117) 0:10:21.813 ******** ok: [managed-node12] => { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": true, "encryption_cipher": null, "encryption_clevis_pin": "tang", "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "encryption_tang_thumbprint": null, "encryption_tang_url": "localhost:7500", "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "stratis", "volumes": [] } ] } TASK [Print out volume information] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7 Saturday 28 March 2026 19:32:46 -0400 (0:00:00.092) 0:10:21.906 ******** skipping: [managed-node12] => { "false_condition": "_storage_volumes_list | length > 0" } TASK [Collect info about the volumes.] ***************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15 Saturday 28 March 2026 19:32:46 -0400 (0:00:00.064) 0:10:21.972 ******** ok: [managed-node12] => { "changed": false, "info": { "/dev/loop0": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/loop0", "size": "0B", "type": "loop", "uuid": "" }, "/dev/mapper/stratis-1-private-035eb0cdeecb407aacde0e95586eb307-crypt": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/mapper/stratis-1-private-035eb0cdeecb407aacde0e95586eb307-crypt", "size": "9.7G", "type": "crypt", "uuid": "" }, "/dev/mapper/stratis-1-private-035eb0cdeecb407aacde0e95586eb307-flex-mdv": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/mapper/stratis-1-private-035eb0cdeecb407aacde0e95586eb307-flex-mdv", "size": "512M", "type": "stratis", "uuid": "" }, "/dev/mapper/stratis-1-private-035eb0cdeecb407aacde0e95586eb307-flex-thindata": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/mapper/stratis-1-private-035eb0cdeecb407aacde0e95586eb307-flex-thindata", "size": "9.2G", "type": "stratis", "uuid": "" }, "/dev/mapper/stratis-1-private-035eb0cdeecb407aacde0e95586eb307-flex-thinmeta": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/mapper/stratis-1-private-035eb0cdeecb407aacde0e95586eb307-flex-thinmeta", "size": "6M", "type": "stratis", "uuid": "" }, "/dev/mapper/stratis-1-private-035eb0cdeecb407aacde0e95586eb307-physical-cache": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/mapper/stratis-1-private-035eb0cdeecb407aacde0e95586eb307-physical-cache", "size": "9.7G", "type": "stratis", "uuid": "" }, "/dev/mapper/stratis-1-private-035eb0cdeecb407aacde0e95586eb307-physical-originsub": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/mapper/stratis-1-private-035eb0cdeecb407aacde0e95586eb307-physical-originsub", "size": "9.7G", "type": "stratis", "uuid": "" }, "/dev/mapper/stratis-1-private-035eb0cdeecb407aacde0e95586eb307-thinpool-pool": { "fstype": "", "label": "", "mountpoint": "", "name": 
"/dev/mapper/stratis-1-private-035eb0cdeecb407aacde0e95586eb307-thinpool-pool", "size": "9.2G", "type": "stratis", "uuid": "" }, "/dev/sda": { "fstype": "stratis", "label": "", "mountpoint": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "803966ff-c939-45fe-a535-ca0e808dc572" }, "/dev/sdb": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda1", "size": "1M", "type": "partition", "uuid": "" }, "/dev/xvda2": { "fstype": "xfs", "label": "", "mountpoint": "/", "name": "/dev/xvda2", "size": "250G", "type": "partition", "uuid": "94fc577e-f1df-44bb-8e86-63b9a68f8f7f" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20 Saturday 28 March 2026 19:32:47 -0400 (0:00:00.504) 0:10:22.476 ******** ok: [managed-node12] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.002919", "end": "2026-03-28 19:32:47.793470", "rc": 0, "start": "2026-03-28 19:32:47.790551" } STDOUT: # system_role:storage # # /etc/fstab # Created by anaconda on Tue Mar 24 08:52:03 2026 # # Accessible filesystems, by reference, are maintained under '/dev/disk/'. # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info. # # After editing this file, run 'systemctl daemon-reload' to update systemd # units generated from this file. 
# UUID=94fc577e-f1df-44bb-8e86-63b9a68f8f7f / xfs defaults 0 0 ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25 Saturday 28 March 2026 19:32:47 -0400 (0:00:00.500) 0:10:22.977 ******** ok: [managed-node12] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.003392", "end": "2026-03-28 19:32:48.279133", "failed_when_result": false, "rc": 0, "start": "2026-03-28 19:32:48.275741" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34 Saturday 28 March 2026 19:32:48 -0400 (0:00:00.468) 0:10:23.445 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node12 => (item={'disks': ['sda'], 'encryption': True, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER', 'encryption_clevis_pin': 'tang', 'encryption_tang_url': 'localhost:7500', 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'present', 'type': 'stratis', 'volumes': []}) TASK [Set _storage_pool_tests] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5 Saturday 28 March 2026 19:32:48 -0400 (0:00:00.098) 0:10:23.544 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [Get VG shared value status] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18 Saturday 28 March 2026 19:32:48 -0400 (0:00:00.047) 0:10:23.591 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'", "skip_reason": "Conditional result was False" } TASK [Verify that VG shared value checks out] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24 Saturday 28 March 
2026 19:32:48 -0400 (0:00:00.036) 0:10:23.627 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'", "skip_reason": "Conditional result was False" } TASK [Verify pool subset] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34 Saturday 28 March 2026 19:32:48 -0400 (0:00:00.040) 0:10:23.668 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node12 => (item=members) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node12 => (item=volumes) TASK [Set test variables] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2 Saturday 28 March 2026 19:32:48 -0400 (0:00:00.062) 0:10:23.730 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Get the canonical device path for each member device] ******************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8 Saturday 28 March 2026 19:32:48 -0400 (0:00:00.025) 0:10:23.756 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Set pvs lvm length] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17 Saturday 28 March 2026 19:32:48 -0400 (0:00:00.014) 0:10:23.770 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Set pool pvs] ************************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22 Saturday 28 March 2026 19:32:48 -0400 (0:00:00.027) 0:10:23.798 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Verify PV count] ********************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27 Saturday 28 March 2026 19:32:48 -0400 (0:00:00.023) 0:10:23.821 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Set expected pv type] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36 Saturday 28 March 2026 19:32:48 -0400 (0:00:00.020) 0:10:23.841 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Set expected pv type - 2] ************************************************ task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41 Saturday 28 March 2026 19:32:48 -0400 (0:00:00.021) 0:10:23.863 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm' and not storage_test_pool.encryption", "skip_reason": "Conditional result was False" } TASK [Set expected pv type - 3] ************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46 Saturday 28 March 2026 19:32:48 -0400 (0:00:00.019) 0:10:23.883 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:55 Saturday 28 March 2026 19:32:48 -0400 (0:00:00.020) 0:10:23.903 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Check that blivet supports PV grow to fill] ****************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:68 Saturday 28 March 2026 19:32:48 -0400 (0:00:00.014) 0:10:23.918 ******** ok: [managed-node12] => { "changed": false, "failed_when_result": false, "rc": 0 } STDERR: OpenSSH_9.9p1, OpenSSL 3.5.5 27 Jan 2026 debug1: Reading configuration data /root/.ssh/config debug1: Reading configuration data /etc/ssh/ssh_config debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug2: checking match for 'final all' host 10.31.11.27 originally 10.31.11.27 debug2: match not found debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config debug1: configuration requests final Match pass debug2: resolve_canonicalize: hostname 10.31.11.27 is address debug1: re-parsing configuration debug1: Reading configuration data /root/.ssh/config debug1: Reading configuration data /etc/ssh/ssh_config debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug2: checking match for 'final all' host 10.31.11.27 originally 10.31.11.27 debug2: match found debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config debug1: auto-mux: Trying existing master at '/root/.ansible/cp/e1141b92e1' debug2: fd 3 setting O_NONBLOCK debug2: mux_client_hello_exchange: master version 4 debug1: mux_client_request_session: master session id: 2 debug2: Received exit status from master 0 Shared connection to 10.31.11.27 closed. 
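For reference, the encrypted Stratis pool reported in blivet_output above (pool "foo" on disk sda, Clevis pin "tang", Tang server localhost:7500) corresponds to a storage_pools entry along these lines. This is a minimal sketch, assuming the pool keys echoed in the log are passed straight through as role variables; the password variable name is hypothetical (its value is hidden by no_log in this run):

- hosts: managed-node12
  tasks:
    - name: Create an encrypted Stratis pool bound to a Tang pin
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_pools:
          - name: foo
            type: stratis
            disks:
              - sda
            encryption: true
            encryption_password: "{{ stratis_pool_password }}"  # hypothetical variable; real value not shown in the log
            encryption_clevis_pin: tang
            encryption_tang_url: localhost:7500
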
TASK [Verify that PVs fill the whole devices when they should] ***************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:78 Saturday 28 March 2026 19:32:49 -0400 (0:00:00.448) 0:10:24.366 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Check MD RAID] *********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:88 Saturday 28 March 2026 19:32:49 -0400 (0:00:00.055) 0:10:24.421 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node12 TASK [Get information about RAID] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8 Saturday 28 March 2026 19:32:49 -0400 (0:00:00.155) 0:10:24.577 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14 Saturday 28 March 2026 19:32:49 -0400 (0:00:00.076) 0:10:24.653 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19 Saturday 28 March 2026 19:32:49 -0400 (0:00:00.071) 0:10:24.725 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24 Saturday 28 March 2026 19:32:49 -0400 (0:00:00.105) 0:10:24.830 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set md chunk size regex] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29 Saturday 28 March 2026 19:32:49 -0400 (0:00:00.082) 0:10:24.912 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37 Saturday 28 March 2026 19:32:49 -0400 (0:00:00.063) 0:10:24.976 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46 Saturday 28 March 2026 19:32:49 
-0400 (0:00:00.058) 0:10:25.034 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55 Saturday 28 March 2026 19:32:49 -0400 (0:00:00.045) 0:10:25.079 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64 Saturday 28 March 2026 19:32:50 -0400 (0:00:00.072) 0:10:25.152 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74 Saturday 28 March 2026 19:32:50 -0400 (0:00:00.040) 0:10:25.193 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83 Saturday 28 March 2026 19:32:50 -0400 (0:00:00.042) 0:10:25.235 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_chunk_size_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:91 Saturday 28 March 2026 19:32:50 -0400 (0:00:00.035) 0:10:25.271 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node12 TASK [Validate pool member LVM RAID settings] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2 Saturday 28 March 2026 19:32:50 -0400 (0:00:00.075) 0:10:25.347 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Check Thin Pools] ******************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:94 Saturday 28 March 2026 19:32:50 -0400 (0:00:00.036) 0:10:25.383 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node12 TASK [Validate pool member thinpool settings] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2 Saturday 28 March 2026 19:32:50 -0400 (0:00:00.073) 0:10:25.456 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" 
} TASK [Check member encryption] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:97 Saturday 28 March 2026 19:32:50 -0400 (0:00:00.027) 0:10:25.483 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node12 TASK [Set test variables] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5 Saturday 28 March 2026 19:32:50 -0400 (0:00:00.061) 0:10:25.545 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "1", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10 Saturday 28 March 2026 19:32:50 -0400 (0:00:00.030) 0:10:25.575 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Validate pool member crypttab entries] *********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17 Saturday 28 March 2026 19:32:50 -0400 (0:00:00.016) 0:10:25.592 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24 Saturday 28 March 2026 19:32:50 -0400 (0:00:00.020) 0:10:25.612 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:100 Saturday 28 March 2026 19:32:50 -0400 (0:00:00.024) 0:10:25.637 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node12 TASK [Validate pool member VDO settings] *************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2 Saturday 28 March 2026 19:32:50 -0400 (0:00:00.051) 0:10:25.688 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Check Stratis] *********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:103 Saturday 28 March 2026 19:32:50 -0400 (0:00:00.022) 0:10:25.711 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node12 TASK [Get stratis pool information] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6 Saturday 28 March 2026 19:32:50 -0400 (0:00:00.085) 0:10:25.797 ******** ok: [managed-node12] => { 
"changed": false, "rc": 0 } STDOUT: {"name": "foo", "encrypted": true, "key_desc": "blivet-foo", "clevis_pin": "tang", "clevis_args": {"thp": "SJTRMEcogeU2yKtYMh-wvo8zKm_ULqrlPu5_-8IrCqI", "url": "localhost:7500"}} STDERR: OpenSSH_9.9p1, OpenSSL 3.5.5 27 Jan 2026 debug1: Reading configuration data /root/.ssh/config debug1: Reading configuration data /etc/ssh/ssh_config debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug2: checking match for 'final all' host 10.31.11.27 originally 10.31.11.27 debug2: match not found debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config debug1: configuration requests final Match pass debug2: resolve_canonicalize: hostname 10.31.11.27 is address debug1: re-parsing configuration debug1: Reading configuration data /root/.ssh/config debug1: Reading configuration data /etc/ssh/ssh_config debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug2: checking match for 'final all' host 10.31.11.27 originally 10.31.11.27 debug2: match found debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config debug1: auto-mux: Trying existing master at '/root/.ansible/cp/e1141b92e1' debug2: fd 3 setting O_NONBLOCK debug2: mux_client_hello_exchange: master version 4 debug1: mux_client_request_session: master session id: 2 debug2: Received exit status from master 0 Shared connection to 10.31.11.27 closed. TASK [Print script output] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15 Saturday 28 March 2026 19:32:51 -0400 (0:00:00.541) 0:10:26.338 ******** ok: [managed-node12] => {} MSG: {'name': 'foo', 'encrypted': True, 'key_desc': 'blivet-foo', 'clevis_pin': 'tang', 'clevis_args': {'thp': 'SJTRMEcogeU2yKtYMh-wvo8zKm_ULqrlPu5_-8IrCqI', 'url': 'localhost:7500'}} TASK [Get information about Stratis] ******************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:19 Saturday 28 March 2026 19:32:51 -0400 (0:00:00.040) 0:10:26.378 ******** ok: [managed-node12] => { "ansible_facts": { "_stratis_pool_info": { "clevis_args": { "thp": "SJTRMEcogeU2yKtYMh-wvo8zKm_ULqrlPu5_-8IrCqI", "url": "localhost:7500" }, "clevis_pin": "tang", "encrypted": true, "key_desc": "blivet-foo", "name": "foo" } }, "changed": false } TASK [Verify that the pools was created] *************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:23 Saturday 28 March 2026 19:32:51 -0400 (0:00:00.053) 0:10:26.432 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Verify that encryption is correctly set] ********************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:30 Saturday 28 March 2026 19:32:51 -0400 (0:00:00.062) 0:10:26.494 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Verify that Clevis/Tang encryption is correctly set] ********************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:39 Saturday 28 March 2026 19:32:51 -0400 (0:00:00.048) 0:10:26.542 ******** ok: [managed-node12] => { "changed": false } MSG: All assertions passed TASK [Reset variable used by test] 
********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:49 Saturday 28 March 2026 19:32:51 -0400 (0:00:00.052) 0:10:26.595 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_stratis_report": null }, "changed": false } TASK [Clean up test variables] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:106 Saturday 28 March 2026 19:32:51 -0400 (0:00:00.043) 0:10:26.638 ******** ok: [managed-node12] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Verify the volumes] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3 Saturday 28 March 2026 19:32:51 -0400 (0:00:00.040) 0:10:26.678 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:43 Saturday 28 March 2026 19:32:51 -0400 (0:00:00.024) 0:10:26.703 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Clean up variable namespace] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:52 Saturday 28 March 2026 19:32:51 -0400 (0:00:00.025) 0:10:26.729 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } TASK [Clean up - 4] ************************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:260 Saturday 28 March 2026 19:32:51 -0400 (0:00:00.036) 0:10:26.765 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml for managed-node12 TASK [Clear facts] ************************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml:10 Saturday 28 March 2026 19:32:51 -0400 (0:00:00.049) 0:10:26.815 ******** META: facts cleared TASK [Run the role] ************************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml:24 Saturday 28 March 2026 19:32:51 -0400 (0:00:00.009) 0:10:26.824 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "__sr_failed_when is defined", "skip_reason": "Conditional result was False" } TASK [Run the role normally] *************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tasks/run_role_with_clear_facts.yml:34 Saturday 28 March 2026 19:32:51 -0400 (0:00:00.030) 0:10:26.855 ******** included: fedora.linux_system_roles.storage for managed-node12 TASK 
[fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Saturday 28 March 2026 19:32:51 -0400 (0:00:00.039) 0:10:26.894 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node12 TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Saturday 28 March 2026 19:32:51 -0400 (0:00:00.025) 0:10:26.920 ******** ok: [managed-node12] TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Saturday 28 March 2026 19:32:52 -0400 (0:00:01.008) 0:10:27.928 ******** skipping: [managed-node12] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node12] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node12] => (item=CentOS_10.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-fs", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "xfsprogs", "stratisd", "stratis-cli", "{{ 'libblockdev-s390' if ansible_facts['architecture'] == 's390x' else 'libblockdev' }}", "vdo" ] }, "ansible_included_var_files": [ "/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node12] => (item=CentOS_10.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-fs", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "xfsprogs", "stratisd", "stratis-cli", "{{ 'libblockdev-s390' if ansible_facts['architecture'] == 's390x' else 'libblockdev' }}", "vdo" ] }, "ansible_included_var_files": [ "/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Saturday 28 March 2026 19:32:52 -0400 (0:00:00.119) 0:10:28.048 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "not __storage_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Saturday 28 March 2026 19:32:52 -0400 (0:00:00.042) 0:10:28.091 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "not __storage_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Saturday 28 March 2026 19:32:53 -0400 (0:00:00.039) 0:10:28.130 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Saturday 28 March 2026 19:32:53 -0400 (0:00:00.032) 0:10:28.163 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Saturday 28 March 2026 19:32:53 -0400 (0:00:00.034) 0:10:28.197 ******** redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node12 TASK [fedora.linux_system_roles.storage : Add repo key] ************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Saturday 28 March 2026 19:32:53 -0400 (0:00:00.075) 0:10:28.273 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_blivet_custom_repo.key is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Add blivet repo] ********************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:15 Saturday 28 March 2026 19:32:53 -0400 (0:00:00.034) 0:10:28.307 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "_blivet_custom_repo.key is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:20 Saturday 28 March 2026 19:32:53 -0400 (0:00:00.033) 0:10:28.341 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_skip_checks is not defined or not \"blivet_available\" in storage_skip_checks", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:27 Saturday 28 March 2026 19:32:53 -0400 (0:00:00.040) 0:10:28.381 ******** ok: [managed-node12] => { "storage_pools | d([])": [ { "disks": "sda", "name": "foo", "state": "absent", "type": "stratis" } ] } TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:32 Saturday 28 March 2026 19:32:53 -0400 (0:00:00.042) 0:10:28.423 ******** ok: [managed-node12] => { "storage_volumes | d([])": [] } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37 Saturday 28 March 2026 19:32:53 -0400 (0:00:00.245) 0:10:28.669 ******** skipping: [managed-node12] => { 
"changed": false, "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:50 Saturday 28 March 2026 19:32:53 -0400 (0:00:00.037) 0:10:28.706 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:56 Saturday 28 March 2026 19:32:53 -0400 (0:00:00.022) 0:10:28.729 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_skip_checks is not defined or not \"packages_installed\" in storage_skip_checks", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:76 Saturday 28 March 2026 19:32:53 -0400 (0:00:00.024) 0:10:28.754 ******** ok: [managed-node12] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "apt-daily.service": { "name": "apt-daily.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": 
"enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.fedoraproject.FirewallD1.service": { "name": "dbus-org.fedoraproject.FirewallD1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" 
}, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fcoe.service": { "name": "fcoe.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "running", "status": "enabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ipset.service": { "name": "ipset.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "iscsi-shutdown.service": { "name": "iscsi-shutdown.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsi.service": { "name": "iscsi.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsid.service": { "name": "iscsid.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", 
"status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-activation-early.service": { "name": "lvm2-activation-early.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_multipath.service": { "name": "modprobe@dm_multipath.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", 
"status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "raid-check.service": { "name": "raid-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "rbdmap.service": { "name": "rbdmap.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "stopped", 
"status": "enabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-unix-local@.service": { "name": "sshd-unix-local@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd-vsock@.service": { "name": "sshd-vsock@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "stratis-fstab-setup-with-network@.service": { "name": "stratis-fstab-setup-with-network@.service", "source": "systemd", "state": "unknown", "status": "static" }, "stratis-fstab-setup@.service": { "name": "stratis-fstab-setup@.service", "source": "systemd", "state": "unknown", "status": "static" }, "stratisd-min-postinitrd.service": { "name": "stratisd-min-postinitrd.service", "source": "systemd", "state": "inactive", "status": "static" 
}, "stratisd.service": { "name": "stratisd.service", "source": "systemd", "state": "running", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": 
"systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": 
"inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, 
"systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-userdbd.service": { "name": "systemd-userdbd.service", "source": "systemd", "state": "running", "status": "indirect" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "tangd@.service": { "name": "tangd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:82 
Saturday 28 March 2026 19:32:55 -0400 (0:00:02.035) 0:10:30.789 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:88 Saturday 28 March 2026 19:32:55 -0400 (0:00:00.066) 0:10:30.856 ******** changed: [managed-node12] => { "actions": [ { "action": "destroy device", "device": "/dev/stratis/foo", "fs_type": null }, { "action": "destroy format", "device": "/dev/sda", "fs_type": "stratis" } ], "changed": true, "crypts": [], "leaves": [ "/dev/sda", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/xvda2" ], "mounts": [], "packages": [ "xfsprogs" ], "pools": [ { "disks": [ "sda" ], "encryption": true, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "stratis", "volumes": [] } ], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:103 Saturday 28 March 2026 19:33:01 -0400 (0:00:05.486) 0:10:36.343 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_udevadm_trigger | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ****** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:110 Saturday 28 March 2026 19:33:01 -0400 (0:00:00.027) 0:10:36.371 ******** ok: [managed-node12] => { "changed": false, "stat": { "atime": 1774740645.8607492, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "6aeccfbb3223a634b983c3c21792c1ba90809bb8", "ctime": 1774740641.1286864, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 230686985, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1774740641.1286864, "nlink": 1, "path": "/etc/fstab", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1408, "uid": 0, "version": "1679064280", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:115 Saturday 28 March 2026 19:33:01 -0400 (0:00:00.401) 0:10:36.772 ******** ok: [managed-node12] => { "backup": "", "changed": false } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:133 
Saturday 28 March 2026 19:33:02 -0400 (0:00:00.411) 0:10:37.184 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:139 Saturday 28 March 2026 19:33:02 -0400 (0:00:00.032) 0:10:37.216 ******** ok: [managed-node12] => { "blivet_output": { "actions": [ { "action": "destroy device", "device": "/dev/stratis/foo", "fs_type": null }, { "action": "destroy format", "device": "/dev/sda", "fs_type": "stratis" } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/sda", "/dev/sdb", "/dev/sdc", "/dev/sdd", "/dev/sde", "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/xvda1", "/dev/xvda2" ], "mounts": [], "packages": [ "xfsprogs" ], "pools": [ { "disks": [ "sda" ], "encryption": true, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "stratis", "volumes": [] } ], "volumes": [] } } TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:148 Saturday 28 March 2026 19:33:02 -0400 (0:00:00.027) 0:10:37.244 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": true, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "stratis", "volumes": [] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:152 Saturday 28 March 2026 19:33:02 -0400 (0:00:00.026) 0:10:37.271 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] ************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:168 Saturday 28 March 2026 19:33:02 -0400 (0:00:00.025) 0:10:37.296 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:179 Saturday 28 March 2026 19:33:02 -0400 (0:00:00.022) 0:10:37.319 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "blivet_output['mounts'] | length > 0", "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.storage : Set up new/current mounts] *********** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:184 Saturday 28 March 2026 19:33:02 -0400 (0:00:00.023) 0:10:37.343 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:195 Saturday 28 March 2026 19:33:02 -0400 (0:00:00.034) 0:10:37.377 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:207 Saturday 28 March 2026 19:33:02 -0400 (0:00:00.035) 0:10:37.413 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "blivet_output['mounts'] | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:215 Saturday 28 March 2026 19:33:02 -0400 (0:00:00.025) 0:10:37.439 ******** ok: [managed-node12] => { "changed": false, "stat": { "atime": 1774737762.6954868, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1774342704.475, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 4194435, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1774342323.629, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "1224473831", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:220 Saturday 28 March 2026 19:33:02 -0400 (0:00:00.380) 0:10:37.820 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:242 Saturday 28 March 2026 19:33:02 -0400 (0:00:00.016) 0:10:37.836 ******** ok: [managed-node12] TASK [Verify role results - 12] ************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:269 Saturday 28 March 2026 19:33:03 -0400 (0:00:00.943) 0:10:38.780 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml for managed-node12 TASK [Print out pool information] ********************************************** task path: 
/tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:2 Saturday 28 March 2026 19:33:03 -0400 (0:00:00.075) 0:10:38.855 ******** ok: [managed-node12] => { "_storage_pools_list": [ { "disks": [ "sda" ], "encryption": true, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": false, "name": "foo", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "stratis", "volumes": [] } ] } TASK [Print out volume information] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:7 Saturday 28 March 2026 19:33:03 -0400 (0:00:00.053) 0:10:38.909 ******** skipping: [managed-node12] => { "false_condition": "_storage_volumes_list | length > 0" } TASK [Collect info about the volumes.] ***************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:15 Saturday 28 March 2026 19:33:03 -0400 (0:00:00.026) 0:10:38.935 ******** ok: [managed-node12] => { "changed": false, "info": { "/dev/loop0": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/loop0", "size": "0B", "type": "loop", "uuid": "" }, "/dev/sda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sda", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdb": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdb", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdc": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdc", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdd": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdd", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sde": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sde", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdf": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdf", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdg": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdg", "size": "1T", "type": "disk", "uuid": "" }, "/dev/sdh": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdh", "size": "10G", "type": "disk", "uuid": "" }, "/dev/sdi": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/sdi", "size": "10G", "type": "disk", "uuid": "" }, "/dev/xvda": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda", "size": "250G", "type": "disk", "uuid": "" }, "/dev/xvda1": { "fstype": "", "label": "", "mountpoint": "", "name": "/dev/xvda1", "size": "1M", "type": "partition", "uuid": "" }, "/dev/xvda2": { "fstype": "xfs", "label": "", "mountpoint": "/", "name": "/dev/xvda2", "size": "250G", "type": "partition", "uuid": "94fc577e-f1df-44bb-8e86-63b9a68f8f7f" } } } TASK [Read the /etc/fstab file for volume existence] *************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:20 Saturday 28 March 2026 19:33:04 -0400 (0:00:00.414) 0:10:39.350 ******** ok: [managed-node12] => { "changed": false, "cmd": [ "cat", "/etc/fstab" ], "delta": "0:00:00.003617", "end": "2026-03-28 19:33:04.589058", "rc": 0, "start": 
"2026-03-28 19:33:04.585441" } STDOUT: # system_role:storage # # /etc/fstab # Created by anaconda on Tue Mar 24 08:52:03 2026 # # Accessible filesystems, by reference, are maintained under '/dev/disk/'. # See man pages fstab(5), findfs(8), mount(8) and/or blkid(8) for more info. # # After editing this file, run 'systemctl daemon-reload' to update systemd # units generated from this file. # UUID=94fc577e-f1df-44bb-8e86-63b9a68f8f7f / xfs defaults 0 0 ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat /mnt/redhat nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 nest.test.redhat.com:/mnt/qa /mnt/qa nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 vtap-eng01.storage.rdu2.redhat.com:/vol/engarchive /mnt/engarchive nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 nest.test.redhat.com:/mnt/tpsdist /mnt/tpsdist nfs defaults,rsize=8192,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_engineering_sm/devarchive/redhat/brewroot /mnt/brew nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 ntap-rdu2-c01-eng01-nfs01b.storage.rdu2.redhat.com:/bos_eng01_devops_brew_scratch_nfs_sm/scratch /mnt/brew_scratch nfs ro,rsize=32768,wsize=8192,bg,noauto,noatime,nosuid,nodev,intr,noauto 0 0 TASK [Read the /etc/crypttab file] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:25 Saturday 28 March 2026 19:33:04 -0400 (0:00:00.406) 0:10:39.757 ******** ok: [managed-node12] => { "changed": false, "cmd": [ "cat", "/etc/crypttab" ], "delta": "0:00:00.003138", "end": "2026-03-28 19:33:05.028343", "failed_when_result": false, "rc": 0, "start": "2026-03-28 19:33:05.025205" } TASK [Verify the volumes listed in storage_pools were correctly managed] ******* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:34 Saturday 28 March 2026 19:33:05 -0400 (0:00:00.425) 0:10:40.183 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml for managed-node12 => (item={'disks': ['sda'], 'encryption': True, 'encryption_cipher': None, 'encryption_key': None, 'encryption_key_size': None, 'encryption_luks_version': None, 'encryption_password': None, 'encryption_clevis_pin': None, 'encryption_tang_url': None, 'encryption_tang_thumbprint': None, 'grow_to_fill': False, 'name': 'foo', 'raid_level': None, 'raid_device_count': None, 'raid_spare_count': None, 'raid_metadata_version': None, 'raid_chunk_size': None, 'shared': False, 'state': 'absent', 'type': 'stratis', 'volumes': []}) TASK [Set _storage_pool_tests] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:5 Saturday 28 March 2026 19:33:05 -0400 (0:00:00.039) 0:10:40.222 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_pool_tests": [ "members", "volumes" ] }, "changed": false } TASK [Get VG shared value status] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:18 Saturday 28 March 2026 19:33:05 -0400 (0:00:00.023) 0:10:40.245 ******** skipping: [managed-node12] => { "changed": false, 
"false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'", "skip_reason": "Conditional result was False" } TASK [Verify that VG shared value checks out] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:24 Saturday 28 March 2026 19:33:05 -0400 (0:00:00.020) 0:10:40.266 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm' and storage_test_pool.state == 'present'", "skip_reason": "Conditional result was False" } TASK [Verify pool subset] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool.yml:34 Saturday 28 March 2026 19:33:05 -0400 (0:00:00.020) 0:10:40.286 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml for managed-node12 => (item=members) included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml for managed-node12 => (item=volumes) TASK [Set test variables] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:2 Saturday 28 March 2026 19:33:05 -0400 (0:00:00.051) 0:10:40.338 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Get the canonical device path for each member device] ******************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:8 Saturday 28 March 2026 19:33:05 -0400 (0:00:00.026) 0:10:40.365 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Set pvs lvm length] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:17 Saturday 28 March 2026 19:33:05 -0400 (0:00:00.026) 0:10:40.392 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Set pool pvs] ************************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:22 Saturday 28 March 2026 19:33:05 -0400 (0:00:00.036) 0:10:40.428 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Verify PV count] ********************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:27 Saturday 28 March 2026 19:33:05 -0400 (0:00:00.076) 0:10:40.505 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Set expected pv type] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:36 Saturday 28 March 2026 19:33:05 -0400 (0:00:00.049) 0:10:40.555 
******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Set expected pv type - 2] ************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:41 Saturday 28 March 2026 19:33:05 -0400 (0:00:00.039) 0:10:40.595 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm' and not storage_test_pool.encryption", "skip_reason": "Conditional result was False" } TASK [Set expected pv type - 3] ************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:46 Saturday 28 March 2026 19:33:05 -0400 (0:00:00.067) 0:10:40.662 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.type == 'lvm'", "skip_reason": "Conditional result was False" } TASK [Check the type of each PV] *********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:55 Saturday 28 March 2026 19:33:05 -0400 (0:00:00.063) 0:10:40.726 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Check that blivet supports PV grow to fill] ****************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:68 Saturday 28 March 2026 19:33:05 -0400 (0:00:00.030) 0:10:40.756 ******** ok: [managed-node12] => { "changed": false, "failed_when_result": false, "rc": 0 } STDERR: OpenSSH_9.9p1, OpenSSL 3.5.5 27 Jan 2026 debug1: Reading configuration data /root/.ssh/config debug1: Reading configuration data /etc/ssh/ssh_config debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug2: checking match for 'final all' host 10.31.11.27 originally 10.31.11.27 debug2: match not found debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config debug1: configuration requests final Match pass debug2: resolve_canonicalize: hostname 10.31.11.27 is address debug1: re-parsing configuration debug1: Reading configuration data /root/.ssh/config debug1: Reading configuration data /etc/ssh/ssh_config debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug2: checking match for 'final all' host 10.31.11.27 originally 10.31.11.27 debug2: match found debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config debug1: auto-mux: Trying existing master at '/root/.ansible/cp/e1141b92e1' debug2: fd 3 setting O_NONBLOCK debug2: mux_client_hello_exchange: master version 4 debug1: mux_client_request_session: master session id: 2 debug2: Received exit status from master 0 Shared connection to 10.31.11.27 closed. 
TASK [Verify that PVs fill the whole devices when they should] ***************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:78 Saturday 28 March 2026 19:33:06 -0400 (0:00:00.429) 0:10:41.186 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Check MD RAID] *********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:88 Saturday 28 March 2026 19:33:06 -0400 (0:00:00.018) 0:10:41.204 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml for managed-node12 TASK [Get information about RAID] ********************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:8 Saturday 28 March 2026 19:33:06 -0400 (0:00:00.057) 0:10:41.262 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set active devices regex] ************************************************ task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:14 Saturday 28 March 2026 19:33:06 -0400 (0:00:00.022) 0:10:41.284 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set spare devices regex] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:19 Saturday 28 March 2026 19:33:06 -0400 (0:00:00.023) 0:10:41.308 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set md version regex] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:24 Saturday 28 March 2026 19:33:06 -0400 (0:00:00.021) 0:10:41.329 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Set md chunk size regex] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:29 Saturday 28 March 2026 19:33:06 -0400 (0:00:00.020) 0:10:41.349 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Parse the chunk size] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:37 Saturday 28 March 2026 19:33:06 -0400 (0:00:00.020) 0:10:41.370 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID active devices count] ***************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:46 Saturday 28 March 2026 19:33:06 
-0400 (0:00:00.020) 0:10:41.390 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID spare devices count] ****************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:55 Saturday 28 March 2026 19:33:06 -0400 (0:00:00.019) 0:10:41.410 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID metadata version] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:64 Saturday 28 March 2026 19:33:06 -0400 (0:00:00.020) 0:10:41.430 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Check RAID chunk size] *************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:74 Saturday 28 March 2026 19:33:06 -0400 (0:00:00.022) 0:10:41.452 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.raid_level != none", "skip_reason": "Conditional result was False" } TASK [Reset variables used by tests] ******************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-md.yml:83 Saturday 28 March 2026 19:33:06 -0400 (0:00:00.033) 0:10:41.486 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_md_active_devices_re": null, "storage_test_md_chunk_size_re": null, "storage_test_md_metadata_version_re": null, "storage_test_md_spare_devices_re": null }, "changed": false } TASK [Check LVM RAID] ********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:91 Saturday 28 March 2026 19:33:06 -0400 (0:00:00.037) 0:10:41.524 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml for managed-node12 TASK [Validate pool member LVM RAID settings] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-lvmraid.yml:2 Saturday 28 March 2026 19:33:06 -0400 (0:00:00.121) 0:10:41.645 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Check Thin Pools] ******************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:94 Saturday 28 March 2026 19:33:06 -0400 (0:00:00.035) 0:10:41.681 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml for managed-node12 TASK [Validate pool member thinpool settings] ********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-thin.yml:2 Saturday 28 March 2026 19:33:06 -0400 (0:00:00.104) 0:10:41.785 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" 
} TASK [Check member encryption] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:97 Saturday 28 March 2026 19:33:06 -0400 (0:00:00.027) 0:10:41.812 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml for managed-node12 TASK [Set test variables] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:5 Saturday 28 March 2026 19:33:06 -0400 (0:00:00.076) 0:10:41.889 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_expected_crypttab_entries": "0", "_storage_test_expected_crypttab_key_file": "-" }, "changed": false } TASK [Validate pool member LUKS settings] ************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:10 Saturday 28 March 2026 19:33:06 -0400 (0:00:00.045) 0:10:41.935 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Validate pool member crypttab entries] *********************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:17 Saturday 28 March 2026 19:33:06 -0400 (0:00:00.029) 0:10:41.965 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Clear test variables] **************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-encryption.yml:24 Saturday 28 March 2026 19:33:06 -0400 (0:00:00.019) 0:10:41.985 ******** ok: [managed-node12] => { "ansible_facts": { "_storage_test_crypttab_entries": null, "_storage_test_crypttab_key_file": null }, "changed": false } TASK [Check VDO] *************************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:100 Saturday 28 March 2026 19:33:06 -0400 (0:00:00.041) 0:10:42.026 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml for managed-node12 TASK [Validate pool member VDO settings] *************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-members-vdo.yml:2 Saturday 28 March 2026 19:33:07 -0400 (0:00:00.121) 0:10:42.148 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Check Stratis] *********************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:103 Saturday 28 March 2026 19:33:07 -0400 (0:00:00.025) 0:10:42.174 ******** included: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml for managed-node12 TASK [Get stratis pool information] ******************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:6 Saturday 28 March 2026 19:33:07 -0400 (0:00:00.110) 0:10:42.284 ******** ok: [managed-node12] => { 
"changed": false, "rc": 0 } STDOUT: null STDERR: OpenSSH_9.9p1, OpenSSL 3.5.5 27 Jan 2026 debug1: Reading configuration data /root/.ssh/config debug1: Reading configuration data /etc/ssh/ssh_config debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug2: checking match for 'final all' host 10.31.11.27 originally 10.31.11.27 debug2: match not found debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config debug1: configuration requests final Match pass debug2: resolve_canonicalize: hostname 10.31.11.27 is address debug1: re-parsing configuration debug1: Reading configuration data /root/.ssh/config debug1: Reading configuration data /etc/ssh/ssh_config debug1: Reading configuration data /etc/ssh/ssh_config.d/50-redhat.conf debug2: checking match for 'final all' host 10.31.11.27 originally 10.31.11.27 debug2: match found debug1: Reading configuration data /etc/crypto-policies/back-ends/openssh.config debug1: auto-mux: Trying existing master at '/root/.ansible/cp/e1141b92e1' debug2: fd 3 setting O_NONBLOCK debug2: mux_client_hello_exchange: master version 4 debug1: mux_client_request_session: master session id: 2 debug2: Received exit status from master 0 Shared connection to 10.31.11.27 closed. TASK [Print script output] ***************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:15 Saturday 28 March 2026 19:33:07 -0400 (0:00:00.422) 0:10:42.707 ******** ok: [managed-node12] => {} MSG: null TASK [Get information about Stratis] ******************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:19 Saturday 28 March 2026 19:33:07 -0400 (0:00:00.030) 0:10:42.737 ******** ok: [managed-node12] => { "ansible_facts": { "_stratis_pool_info": "" }, "changed": false } TASK [Verify that the pools was created] *************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:23 Saturday 28 March 2026 19:33:07 -0400 (0:00:00.033) 0:10:42.771 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.state == 'present'", "skip_reason": "Conditional result was False" } TASK [Verify that encryption is correctly set] ********************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:30 Saturday 28 March 2026 19:33:07 -0400 (0:00:00.031) 0:10:42.803 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.state == 'present'", "skip_reason": "Conditional result was False" } TASK [Verify that Clevis/Tang encryption is correctly set] ********************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:39 Saturday 28 March 2026 19:33:07 -0400 (0:00:00.035) 0:10:42.838 ******** skipping: [managed-node12] => { "changed": false, "false_condition": "storage_test_pool.state == 'present'", "skip_reason": "Conditional result was False" } TASK [Reset variable used by test] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-pool-stratis.yml:49 Saturday 28 March 2026 19:33:07 -0400 (0:00:00.022) 0:10:42.861 ******** ok: [managed-node12] => { "ansible_facts": { 
"storage_test_stratis_report": null }, "changed": false } TASK [Clean up test variables] ************************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-members.yml:106 Saturday 28 March 2026 19:33:07 -0400 (0:00:00.025) 0:10:42.887 ******** ok: [managed-node12] => { "ansible_facts": { "__pvs_lvm_len": null, "_storage_test_expected_pv_count": null, "_storage_test_expected_pv_type": null, "_storage_test_pool_pvs": [], "_storage_test_pool_pvs_lvm": [] }, "changed": false } TASK [Verify the volumes] ****************************************************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/test-verify-pool-volumes.yml:3 Saturday 28 March 2026 19:33:07 -0400 (0:00:00.026) 0:10:42.914 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Verify the volumes with no pool were correctly managed] ****************** task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:43 Saturday 28 March 2026 19:33:07 -0400 (0:00:00.016) 0:10:42.930 ******** skipping: [managed-node12] => { "changed": false, "skipped_reason": "No items in the list" } TASK [Clean up variable namespace] ********************************************* task path: /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/verify-role-results.yml:52 Saturday 28 March 2026 19:33:07 -0400 (0:00:00.020) 0:10:42.951 ******** ok: [managed-node12] => { "ansible_facts": { "storage_test_blkinfo": null, "storage_test_crypttab": null, "storage_test_fstab": null }, "changed": false } PLAY RECAP ********************************************************************* managed-node12 : ok=959 changed=25 unreachable=0 failed=0 skipped=1290 rescued=0 ignored=0 SYSTEM ROLES ERRORS BEGIN v1 [] SYSTEM ROLES ERRORS END v1 TASKS RECAP ******************************************************************** Saturday 28 March 2026 19:33:07 -0400 (0:00:00.029) 0:10:42.981 ******** =============================================================================== fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state -- 28.33s /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:88 fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state -- 25.72s /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:88 fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state -- 20.27s /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:88 fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state -- 19.16s /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:88 fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state -- 18.94s /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:88 fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state -- 17.00s /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:88 fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state 
--- 8.16s /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:88 fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 7.20s /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:88 Gather package facts ---------------------------------------------------- 6.92s /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:23 fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 6.15s /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:88 fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 5.49s /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:88 fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 5.19s /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:88 Start stratisd service -------------------------------------------------- 4.87s /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:52 Gathering Facts --------------------------------------------------------- 4.57s /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/tests/storage/tests_stratis.yml:2 fedora.linux_system_roles.storage : Get service facts ------------------- 4.40s /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:76 fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 3.96s /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:88 fedora.linux_system_roles.storage : Get required packages --------------- 3.41s /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:37 fedora.linux_system_roles.storage : Make sure blivet is available ------- 3.40s /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:20 fedora.linux_system_roles.storage : Get service facts ------------------- 3.21s /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:76 fedora.linux_system_roles.storage : Get service facts ------------------- 3.12s /tmp/collections-aXM/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:76
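
Editor's note, appended after the end of the captured run: the final "Manage the pools and volumes to match the specified state" task above reports two actions ("destroy device" on /dev/stratis/foo and "destroy format" stratis on /dev/sda), which is what the role emits when the encrypted Stratis pool from this test is requested with state: absent. A minimal sketch of such an invocation follows, grounded only in the pool dict shown in blivet_output above (name "foo", type stratis, disks [sda], encryption true, state absent); the play header (hosts, become) is an assumption for illustration, not taken from the test file.

# Hypothetical reproduction sketch -- not the upstream tests_stratis.yml
- hosts: managed-node12
  become: true
  vars:
    storage_pools:
      - name: foo              # pool name as reported in blivet_output
        type: stratis
        disks:
          - sda                # member disk as reported in blivet_output
        encryption: true
        state: absent          # drives the "destroy device"/"destroy format" actions logged above
        volumes: []
  roles:
    - fedora.linux_system_roles.storage

Running such a play against a host that still has the pool should produce changed=true with the same two destroy actions; on an already-clean host the task reports ok with an empty actions list, as seen in earlier idempotency passes of this test.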