ansible-playbook [core 2.17.14]
  config file = None
  configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /usr/local/lib/python3.12/site-packages/ansible
  ansible collection location = /tmp/collections-GW7
  executable location = /usr/local/bin/ansible-playbook
  python version = 3.12.12 (main, Jan 16 2026, 00:00:00) [GCC 11.5.0 20240719 (Red Hat 11.5.0-14)] (/usr/bin/python3.12)
  jinja version = 3.1.6
  libyaml = True
No config file found; using defaults
running playbook inside collection fedora.linux_system_roles
Skipping callback 'debug', as we already have a stdout callback.
Skipping callback 'json', as we already have a stdout callback.
Skipping callback 'jsonl', as we already have a stdout callback.
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.

PLAYBOOK: tests_skip_toolkit.yml ***********************************************
1 plays in /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/tests/hpc/tests_skip_toolkit.yml

PLAY [Verify if role configures a custom storage properly] *********************

TASK [Gathering Facts] *********************************************************
task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/tests/hpc/tests_skip_toolkit.yml:3
Friday 30 January 2026  16:03:25 -0500 (0:00:00.018)       0:00:00.018 ********
[WARNING]: Platform linux on host managed-node1 is using the discovered Python
interpreter at /usr/bin/python3.9, but future installation of another Python
interpreter could change the meaning of that path. See
https://docs.ansible.com/ansible-core/2.17/reference_appendices/interpreter_discovery.html
for more information.
ok: [managed-node1] TASK [Skip unsupported architectures] ****************************************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/tests/hpc/tests_skip_toolkit.yml:24 Friday 30 January 2026 16:03:26 -0500 (0:00:01.088) 0:00:01.107 ******** included: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/tests/hpc/tasks/skip_unsupported_archs.yml for managed-node1 TASK [Gather architecture facts] *********************************************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/tests/hpc/tasks/skip_unsupported_archs.yml:3 Friday 30 January 2026 16:03:26 -0500 (0:00:00.012) 0:00:01.120 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "\"architecture\" not in ansible_facts.keys() | list", "skip_reason": "Conditional result was False" } TASK [Skip unsupported architectures] ****************************************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/tests/hpc/tasks/skip_unsupported_archs.yml:8 Friday 30 January 2026 16:03:26 -0500 (0:00:00.030) 0:00:01.150 ******** META: end_host conditional evaluated to False, continuing execution for managed-node1 skipping: [managed-node1] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node1" } MSG: end_host conditional evaluated to false, continuing execution for managed-node1 TASK [Ensure test packages] **************************************************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/tests/hpc/tests_skip_toolkit.yml:32 Friday 30 January 2026 16:03:26 -0500 (0:00:00.004) 0:00:01.155 ******** ok: [managed-node1] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [Find unused disks in the system] ***************************************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/tests/hpc/tests_skip_toolkit.yml:41 Friday 30 January 2026 16:03:27 -0500 (0:00:01.271) 0:00:02.426 ******** ok: [managed-node1] => { "changed": false, "disks": [ "sda", "sdb" ], "info": [ "Line: NAME=\"/dev/sda\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG_SEC=\"512\"", "Line: NAME=\"/dev/sdb\" TYPE=\"disk\" SIZE=\"10737418240\" FSTYPE=\"\" LOG_SEC=\"512\"", "Line: NAME=\"/dev/xvda\" TYPE=\"disk\" SIZE=\"268435456000\" FSTYPE=\"\" LOG_SEC=\"512\"", "Line: NAME=\"/dev/xvda1\" TYPE=\"part\" SIZE=\"268434390528\" FSTYPE=\"xfs\" LOG_SEC=\"512\"", "Line type [part] is not disk: NAME=\"/dev/xvda1\" TYPE=\"part\" SIZE=\"268434390528\" FSTYPE=\"xfs\" LOG_SEC=\"512\"", "filename [xvda1] is a partition", "Disk [/dev/xvda] attrs [{'type': 'disk', 'size': '268435456000', 'fstype': '', 'ssize': '512'}] has partitions" ] } TASK [Debug why there are no unused disks] ************************************* task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/tests/hpc/tests_skip_toolkit.yml:50 Friday 30 January 2026 16:03:28 -0500 (0:00:00.462) 0:00:02.888 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "'Unable to find unused disk' in unused_disks_return.disks", "skip_reason": "Conditional result was False" } TASK [Set unused_disks if necessary] ******************************************* task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/tests/hpc/tests_skip_toolkit.yml:59 Friday 30 January 2026 16:03:28 -0500 (0:00:00.010) 0:00:02.899 ******** ok: [managed-node1] => { "ansible_facts": { 
"unused_disks": [ "sda", "sdb" ] }, "changed": false } TASK [Exit playbook when there's not enough unused disks in the system] ******** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/tests/hpc/tests_skip_toolkit.yml:64 Friday 30 January 2026 16:03:28 -0500 (0:00:00.014) 0:00:02.913 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "unused_disks | d([]) | length < disks_needed | d(1)", "skip_reason": "Conditional result was False" } TASK [Prepare storage] ********************************************************* task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/tests/hpc/tests_skip_toolkit.yml:69 Friday 30 January 2026 16:03:28 -0500 (0:00:00.029) 0:00:02.942 ******** included: fedora.linux_system_roles.storage for managed-node1 TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Friday 30 January 2026 16:03:28 -0500 (0:00:00.021) 0:00:02.964 ******** included: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node1 TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Friday 30 January 2026 16:03:28 -0500 (0:00:00.015) 0:00:02.979 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Friday 30 January 2026 16:03:28 -0500 (0:00:00.032) 0:00:03.012 ******** skipping: [managed-node1] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node1] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node1] => (item=CentOS_9.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs", "stratisd", "stratis-cli", "{{ 'libblockdev-s390' if ansible_facts['architecture'] == 's390x' else 'libblockdev' }}" ] }, "ansible_included_var_files": [ "/tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_9.yml" } ok: [managed-node1] => (item=CentOS_9.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs", "stratisd", "stratis-cli", "{{ 'libblockdev-s390' if ansible_facts['architecture'] == 's390x' else 'libblockdev' }}" ] }, "ansible_included_var_files": [ "/tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_9.yml" } TASK 
[fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Friday 30 January 2026 16:03:28 -0500 (0:00:00.038) 0:00:03.050 ******** ok: [managed-node1] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Friday 30 January 2026 16:03:29 -0500 (0:00:00.429) 0:00:03.480 ******** ok: [managed-node1] => { "ansible_facts": { "__storage_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Friday 30 January 2026 16:03:29 -0500 (0:00:00.019) 0:00:03.500 ******** ok: [managed-node1] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Friday 30 January 2026 16:03:29 -0500 (0:00:00.012) 0:00:03.512 ******** ok: [managed-node1] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Friday 30 January 2026 16:03:29 -0500 (0:00:00.012) 0:00:03.525 ******** redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount included: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node1 TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 Friday 30 January 2026 16:03:29 -0500 (0:00:00.037) 0:00:03.562 ******** changed: [managed-node1] => { "changed": true, "rc": 0, "results": [ "Installed: cxl-libs-82-1.el9.x86_64", "Installed: libblockdev-lvm-2.28-16.el9.x86_64", "Installed: python3-wcwidth-0.2.5-8.el9.noarch", "Installed: python3-pyparted-1:3.12.0-1.el9.x86_64", "Installed: libluksmeta-10-1.el9.x86_64", "Installed: stratis-cli-3.7.0-1.el9.noarch", "Installed: python3-into-dbus-python-0.8.2-1.el9.noarch", "Installed: tpm2-tools-5.2-7.el9.x86_64", "Installed: libblockdev-mdraid-2.28-16.el9.x86_64", "Installed: ndctl-82-1.el9.x86_64", "Installed: clevis-21-209.el9.x86_64", "Installed: stratisd-3.7.3-1.el9.x86_64", "Installed: libblockdev-mpath-2.28-16.el9.x86_64", "Installed: ndctl-libs-82-1.el9.x86_64", "Installed: clevis-luks-21-209.el9.x86_64", "Installed: libblockdev-nvdimm-2.28-16.el9.x86_64", "Installed: python3-blivet-1:3.6.0-29.el9.noarch", "Installed: daxctl-libs-82-1.el9.x86_64", "Installed: lsof-4.94.0-3.el9.x86_64", "Installed: python3-blockdev-2.28-16.el9.x86_64", "Installed: libaio-0.3.111-13.el9.x86_64", "Installed: python3-justbases-0.15.2-1.el9.noarch", "Installed: python3-justbytes-0.15.2-1.el9.noarch", "Installed: python3-bytesize-2.5-3.el9.x86_64", "Installed: lvm2-9:2.03.32-2.el9.x86_64", "Installed: blivet-data-1:3.6.0-29.el9.noarch", "Installed: 
libblockdev-part-2.28-16.el9.x86_64", "Installed: lvm2-libs-9:2.03.32-2.el9.x86_64", "Installed: libblockdev-2.28-16.el9.x86_64", "Installed: volume_key-libs-0.3.12-16.el9.x86_64", "Installed: kmod-kvdo-8.2.6.3-185.el9.x86_64", "Installed: libblockdev-swap-2.28-16.el9.x86_64", "Installed: libjose-14-1.el9.x86_64", "Installed: device-mapper-event-9:1.02.206-2.el9.x86_64", "Installed: libblockdev-crypto-2.28-16.el9.x86_64", "Installed: python3-packaging-20.9-5.el9.noarch", "Installed: python3-dbus-client-gen-0.5.1-1.el9.noarch", "Installed: python3-dbus-python-client-gen-0.8.3-1.el9.noarch", "Installed: device-mapper-event-libs-9:1.02.206-2.el9.x86_64", "Installed: cryptsetup-2.8.1-2.el9.x86_64", "Installed: libblockdev-dm-2.28-16.el9.x86_64", "Installed: libblockdev-utils-2.28-16.el9.x86_64", "Installed: python3-dbus-signature-pyparsing-0.4.1-1.el9.noarch", "Installed: libblockdev-fs-2.28-16.el9.x86_64", "Installed: mdadm-4.4-3.el9.x86_64", "Installed: luksmeta-10-1.el9.x86_64", "Installed: device-mapper-multipath-0.8.7-42.el9.x86_64", "Installed: libnvme-1.16.1-2.el9.x86_64", "Installed: vdo-8.2.2.2-1.el9.x86_64", "Installed: jose-14-1.el9.x86_64", "Installed: libblockdev-loop-2.28-16.el9.x86_64", "Installed: libblockdev-kbd-2.28-16.el9.x86_64", "Installed: libbytesize-2.5-3.el9.x86_64", "Installed: device-mapper-multipath-libs-0.8.7-42.el9.x86_64", "Installed: device-mapper-persistent-data-1.1.0-1.el9.x86_64", "Installed: python3-psutil-5.8.0-12.el9.x86_64" ] } TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Friday 30 January 2026 16:05:09 -0500 (0:01:40.354) 0:01:43.917 ******** ok: [managed-node1] => { "storage_pools | d([])": [ { "disks": [ "sda", "sdb" ], "grow_to_fill": true, "name": "rootvg", "volumes": [ { "mount_point": "/hpc-test1", "name": "rootlv", "size": "2G" }, { "mount_point": "/hpc-test2", "name": "usrlv", "size": "1G" } ] } ] } TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14 Friday 30 January 2026 16:05:09 -0500 (0:00:00.037) 0:01:43.954 ******** ok: [managed-node1] => { "storage_volumes | d([])": [] } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 Friday 30 January 2026 16:05:09 -0500 (0:00:00.033) 0:01:43.988 ******** ok: [managed-node1] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [ "lvm2" ], "pools": [], "volumes": [] } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:32 Friday 30 January 2026 16:05:10 -0500 (0:00:01.113) 0:01:45.102 ******** included: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml for managed-node1 TASK [fedora.linux_system_roles.storage : Check if the COPR support packages should be installed] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:2 Friday 30 January 2026 16:05:10 -0500 (0:00:00.025) 0:01:45.127 ******** skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in 
the list" } TASK [fedora.linux_system_roles.storage : Make sure COPR support packages are present] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:13 Friday 30 January 2026 16:05:10 -0500 (0:00:00.029) 0:01:45.157 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "install_copr | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable COPRs] ************************ task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:19 Friday 30 January 2026 16:05:10 -0500 (0:00:00.032) 0:01:45.189 ******** skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:38 Friday 30 January 2026 16:05:10 -0500 (0:00:00.028) 0:01:45.218 ******** ok: [managed-node1] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:52 Friday 30 January 2026 16:05:11 -0500 (0:00:01.188) 0:01:46.406 ******** ok: [managed-node1] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "apt-daily.service": { "name": "apt-daily.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": 
"cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": 
"dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fcoe.service": { "name": "fcoe.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "iscsi-shutdown.service": { "name": "iscsi-shutdown.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsi.service": { "name": "iscsi.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsid.service": { "name": "iscsid.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-activation-early.service": { "name": "lvm2-activation-early.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, 
"man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ndctl-monitor.service": { "name": "ndctl-monitor.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, 
"pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "raid-check.service": { "name": "raid-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "rbdmap.service": { "name": "rbdmap.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sku_customisation.service": { "name": "sku_customisation.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": 
"inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "stratis-fstab-setup@.service": { "name": "stratis-fstab-setup@.service", "source": "systemd", "state": "unknown", "status": "static" }, "stratisd-min-postinitrd.service": { "name": "stratisd-min-postinitrd.service", "source": "systemd", "state": "inactive", "status": "static" }, "stratisd.service": { "name": "stratisd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": 
"stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles.service": { "name": "systemd-tmpfiles.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": 
"systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "yppasswdd.service": { "name": "yppasswdd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypserv.service": { "name": "ypserv.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypxfrd.service": { "name": "ypxfrd.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] ***** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:58 Friday 30 January 2026 16:05:14 -0500 (0:00:02.071) 0:01:48.478 ******** ok: [managed-node1] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:64 Friday 30 January 2026 16:05:14 -0500 (0:00:00.045) 0:01:48.524 ******** skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 Friday 30 January 2026 16:05:14 -0500 (0:00:00.011) 0:01:48.535 ******** changed: [managed-node1] => { "actions": [ { "action": "create format", "device": "/dev/sdb", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sda", "fs_type": "lvmpv" }, { "action": "create device", "device": "/dev/rootvg", "fs_type": null }, { "action": "create device", "device": "/dev/mapper/rootvg-usrlv", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/rootvg-usrlv", 
"fs_type": "xfs" }, { "action": "create device", "device": "/dev/mapper/rootvg-rootlv", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/rootvg-rootlv", "fs_type": "xfs" } ], "changed": true, "crypts": [], "leaves": [ "/dev/xvda1", "/dev/mapper/rootvg-rootlv", "/dev/mapper/rootvg-usrlv" ], "mounts": [ { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/hpc-test1", "src": "/dev/mapper/rootvg-rootlv", "state": "mounted" }, { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/hpc-test2", "src": "/dev/mapper/rootvg-usrlv", "state": "mounted" } ], "packages": [ "xfsprogs", "lvm2" ], "pools": [ { "disks": [ "sda", "sdb" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": true, "name": "rootvg", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/rootvg-rootlv", "_kernel_device": "/dev/dm-1", "_mount_id": "/dev/mapper/rootvg-rootlv", "_raw_device": "/dev/mapper/rootvg-rootlv", "_raw_kernel_device": "/dev/dm-1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/hpc-test1", "mount_user": null, "name": "rootlv", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "2G", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/rootvg-usrlv", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/rootvg-usrlv", "_raw_device": "/dev/mapper/rootvg-usrlv", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/hpc-test2", "mount_user": null, "name": "usrlv", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "1G", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } TASK 
[fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:85 Friday 30 January 2026 16:05:17 -0500 (0:00:03.142) 0:01:51.678 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "storage_udevadm_trigger | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ****** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:92 Friday 30 January 2026 16:05:17 -0500 (0:00:00.032) 0:01:51.711 ******** ok: [managed-node1] => { "changed": false, "stat": { "atime": 1769806606.483, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "1844436a001e022e547a31f3b22270b541234b02", "ctime": 1767628069.142, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 4194435, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1767628069.142, "nlink": 1, "path": "/etc/fstab", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1386, "uid": 0, "version": "3657942965", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:97 Friday 30 January 2026 16:05:17 -0500 (0:00:00.366) 0:01:52.077 ******** changed: [managed-node1] => { "backup": "", "changed": true } MSG: line added TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:115 Friday 30 January 2026 16:05:18 -0500 (0:00:00.451) 0:01:52.529 ******** skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:121 Friday 30 January 2026 16:05:18 -0500 (0:00:00.011) 0:01:52.540 ******** ok: [managed-node1] => { "blivet_output": { "actions": [ { "action": "create format", "device": "/dev/sdb", "fs_type": "lvmpv" }, { "action": "create format", "device": "/dev/sda", "fs_type": "lvmpv" }, { "action": "create device", "device": "/dev/rootvg", "fs_type": null }, { "action": "create device", "device": "/dev/mapper/rootvg-usrlv", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/rootvg-usrlv", "fs_type": "xfs" }, { "action": "create device", "device": "/dev/mapper/rootvg-rootlv", "fs_type": null }, { "action": "create format", "device": "/dev/mapper/rootvg-rootlv", "fs_type": "xfs" } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/xvda1", "/dev/mapper/rootvg-rootlv", "/dev/mapper/rootvg-usrlv" ], "mounts": [ { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/hpc-test1", "src": "/dev/mapper/rootvg-rootlv", "state": "mounted" }, { "dump": 0, "fstype": "xfs", "group": null, 
"mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/hpc-test2", "src": "/dev/mapper/rootvg-usrlv", "state": "mounted" } ], "packages": [ "xfsprogs", "lvm2" ], "pools": [ { "disks": [ "sda", "sdb" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": true, "name": "rootvg", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/rootvg-rootlv", "_kernel_device": "/dev/dm-1", "_mount_id": "/dev/mapper/rootvg-rootlv", "_raw_device": "/dev/mapper/rootvg-rootlv", "_raw_kernel_device": "/dev/dm-1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/hpc-test1", "mount_user": null, "name": "rootlv", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "2G", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/rootvg-usrlv", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/rootvg-usrlv", "_raw_device": "/dev/mapper/rootvg-usrlv", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/hpc-test2", "mount_user": null, "name": "usrlv", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "1G", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:130 Friday 30 January 2026 16:05:18 -0500 (0:00:00.020) 0:01:52.561 ******** ok: [managed-node1] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda", "sdb" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, 
"encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": true, "name": "rootvg", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/rootvg-rootlv", "_kernel_device": "/dev/dm-1", "_mount_id": "/dev/mapper/rootvg-rootlv", "_raw_device": "/dev/mapper/rootvg-rootlv", "_raw_kernel_device": "/dev/dm-1", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/hpc-test1", "mount_user": null, "name": "rootlv", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "2G", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/rootvg-usrlv", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/rootvg-usrlv", "_raw_device": "/dev/mapper/rootvg-usrlv", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/hpc-test2", "mount_user": null, "name": "usrlv", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "1G", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:134 Friday 30 January 2026 16:05:18 -0500 (0:00:00.019) 0:01:52.581 ******** ok: [managed-node1] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] ************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:150 Friday 30 January 2026 16:05:18 -0500 (0:00:00.015) 0:01:52.597 ******** skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:161 Friday 30 January 2026 16:05:18 -0500 
(0:00:00.033) 0:01:52.630 ******** ok: [managed-node1] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Set up new/current mounts] *********** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:166 Friday 30 January 2026 16:05:19 -0500 (0:00:00.896) 0:01:53.527 ******** redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount changed: [managed-node1] => (item={'src': '/dev/mapper/rootvg-rootlv', 'path': '/hpc-test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => { "ansible_loop_var": "mount_info", "backup_file": "", "boot": "yes", "changed": true, "dump": "0", "fstab": "/etc/fstab", "fstype": "xfs", "mount_info": { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/hpc-test1", "src": "/dev/mapper/rootvg-rootlv", "state": "mounted" }, "name": "/hpc-test1", "opts": "defaults", "passno": "0", "src": "/dev/mapper/rootvg-rootlv" } redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount changed: [managed-node1] => (item={'src': '/dev/mapper/rootvg-usrlv', 'path': '/hpc-test2', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => { "ansible_loop_var": "mount_info", "backup_file": "", "boot": "yes", "changed": true, "dump": "0", "fstab": "/etc/fstab", "fstype": "xfs", "mount_info": { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/hpc-test2", "src": "/dev/mapper/rootvg-usrlv", "state": "mounted" }, "name": "/hpc-test2", "opts": "defaults", "passno": "0", "src": "/dev/mapper/rootvg-usrlv" } TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:177 Friday 30 January 2026 16:05:19 -0500 (0:00:00.853) 0:01:54.380 ******** skipping: [managed-node1] => (item={'src': '/dev/mapper/rootvg-rootlv', 'path': '/hpc-test1', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => { "ansible_loop_var": "mount_info", "changed": false, "false_condition": "mount_info['owner'] != none or mount_info['group'] != none or mount_info['mode'] != none", "mount_info": { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/hpc-test1", "src": "/dev/mapper/rootvg-rootlv", "state": "mounted" }, "skip_reason": "Conditional result was False" } skipping: [managed-node1] => (item={'src': '/dev/mapper/rootvg-usrlv', 'path': '/hpc-test2', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => { "ansible_loop_var": "mount_info", "changed": false, "false_condition": "mount_info['owner'] != none or mount_info['group'] != none or mount_info['mode'] != none", "mount_info": { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/hpc-test2", "src": "/dev/mapper/rootvg-usrlv", "state": "mounted" }, "skip_reason": "Conditional result was False" } skipping: [managed-node1] 
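The redirect lines above show the new mounts being applied with ansible.posix.mount, one loop item per volume. A minimal standalone sketch of that step, using only the item values printed in the output (the dump/passno fields and the systemd daemon-reload handled by the surrounding tasks are omitted here):

- name: Sketch - mount the new logical volumes as in the task above
  hosts: managed-node1
  tasks:
    - name: Set up new/current mounts (simplified standalone form)
      ansible.posix.mount:
        src: "{{ item.src }}"
        path: "{{ item.path }}"
        fstype: "{{ item.fstype }}"
        opts: "{{ item.opts }}"
        state: "{{ item.state }}"
      loop:
        - { src: /dev/mapper/rootvg-rootlv, path: /hpc-test1, fstype: xfs, opts: defaults, state: mounted }
        - { src: /dev/mapper/rootvg-usrlv, path: /hpc-test2, fstype: xfs, opts: defaults, state: mounted }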
=> { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:189 Friday 30 January 2026 16:05:19 -0500 (0:00:00.047) 0:01:54.427 ******** ok: [managed-node1] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:197 Friday 30 January 2026 16:05:20 -0500 (0:00:00.711) 0:01:55.139 ******** ok: [managed-node1] => { "changed": false, "stat": { "atime": 1769806764.355625, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1767624696.987, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 4194436, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1767624397.527, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "275126202", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:202 Friday 30 January 2026 16:05:21 -0500 (0:00:00.359) 0:01:55.498 ******** skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:224 Friday 30 January 2026 16:05:21 -0500 (0:00:00.014) 0:01:55.513 ******** ok: [managed-node1] TASK [Run the role] ************************************************************ task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/tests/hpc/tests_skip_toolkit.yml:85 Friday 30 January 2026 16:05:21 -0500 (0:00:00.874) 0:01:56.387 ******** included: fedora.linux_system_roles.hpc for managed-node1 TASK [fedora.linux_system_roles.hpc : Set platform/version specific variables] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:3 Friday 30 January 2026 16:05:22 -0500 (0:00:00.126) 0:01:56.513 ******** included: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/set_vars.yml for managed-node1 TASK [fedora.linux_system_roles.hpc : Ensure ansible_facts used by role] ******* task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/set_vars.yml:2 Friday 30 January 2026 16:05:22 -0500 (0:00:00.021) 0:01:56.535 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "__hpc_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Check if system is ostree] *************** task path: 
/tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/set_vars.yml:10 Friday 30 January 2026 16:05:22 -0500 (0:00:00.036) 0:01:56.572 ******** ok: [managed-node1] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.hpc : Set flag to indicate system is ostree] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/set_vars.yml:15 Friday 30 January 2026 16:05:22 -0500 (0:00:00.355) 0:01:56.927 ******** ok: [managed-node1] => { "ansible_facts": { "__hpc_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.hpc : Set platform/version specific variables] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/set_vars.yml:19 Friday 30 January 2026 16:05:22 -0500 (0:00:00.022) 0:01:56.950 ******** skipping: [managed-node1] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node1] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node1] => (item=CentOS_9.yml) => { "ansible_facts": { "__hpc_cuda_toolkit_packages": [ "cuda-toolkit-12-9" ], "__hpc_gdrcopy_info": { "distribution": "el9", "name": "gdrcopy", "sha256": "d6c61358adb52d9a9b71d4e05cb6ac9288ac18274e8d8177a6f197f0ee006130", "url": "https://github.com/NVIDIA/gdrcopy/archive/0f7366e73b019e7facf907381f6b0b2f5a1576e4.tar.gz", "version": "2.5.1-1" }, "__hpc_hpcx_info": { "name": "hpcx", "sha256": "92f746dd8cf293cf5b3955a0addd92e162dd012e1f8f728983a85c6c134e33b0", "url": "https://content.mellanox.com/hpc/hpc-x/v2.24.1_cuda12/hpcx-v2.24.1-gcc-inbox-redhat9-cuda12-x86_64.tbz", "version": "2.24.1" }, "__hpc_microsoft_prod_repo": { "baseurl": "https://packages.microsoft.com/rhel/9/prod/", "description": "Microsoft Production repository", "key": "https://packages.microsoft.com/keys/microsoft.asc", "name": "microsoft-prod" }, "__hpc_moneo_info": { "name": "moneo", "sha256": "bab588b37f9a7d03fff82ff22d8a24c18a64e18eb2dad31f447a67b6fb76bd4c", "url": "https://github.com/Azure/Moneo/archive/refs/tags/v0.3.4.tar.gz", "version": "0.3.4" }, "__hpc_nvidia_cuda_repo": { "baseurl": "https://developer.download.nvidia.com/compute/cuda/repos/rhel9/x86_64", "description": "NVIDIA CUDA repository", "key": "https://developer.download.nvidia.com/compute/cuda/repos/rhel9/x86_64/D42D0685.pub", "name": "nvidia-cuda" }, "__hpc_nvidia_driver_module": "nvidia-driver:575-dkms", "__hpc_nvidia_nccl_packages": [ "libnccl-2.27.5-1+cuda12.9", "libnccl-devel-2.27.5-1+cuda12.9" ], "__hpc_openmpi_info": { "name": "openmpi", "sha256": "53131e1a57e7270f645707f8b0b65ba56048f5b5ac3f68faabed3eb0d710e449", "url": "https://download.open-mpi.org/release/open-mpi/v5.0/openmpi-5.0.8.tar.bz2", "version": "5.0.8" }, "__hpc_pmix_info": { "name": "pmix", "sha256": "6b11f4fd5c9d7f8e55fc6ebdee9af04b839f44d06044e58cea38c87c168784b3", "url": "https://github.com/openpmix/openpmix/releases/download/v4.2.9/pmix-4.2.9.tar.bz2", "version": "4.2.9" }, "__hpc_rhel_epel_repo": { "description": "RHEL EPEL repository", "key": "https://dl.fedoraproject.org/pub/epel/RPM-GPG-KEY-EPEL-9", "name": "RHEL EPEL repository", "rpm": "https://dl.fedoraproject.org/pub/epel/epel-release-latest-9.noarch.rpm" }, "__hpc_rhui_azure_rhel_9_eus_repo": { "baseurl": 
"https://rhui4-1.microsoft.com/pulp/repos/unprotected/microsoft-azure-rhel9-eus", "description": "Microsoft Azure RPMs for Red Hat Enterprise Linux 9 EUS", "key": "file:///etc/pki/rpm-gpg/RPM-GPG-KEY-microsoft-azure-release", "name": "rhui-microsoft-azure-rhel9-eus" } }, "ansible_included_var_files": [ "/tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_9.yml" } ok: [managed-node1] => (item=CentOS_9.yml) => { "ansible_facts": { "__hpc_cuda_toolkit_packages": [ "cuda-toolkit-12-9" ], "__hpc_gdrcopy_info": { "distribution": "el9", "name": "gdrcopy", "sha256": "d6c61358adb52d9a9b71d4e05cb6ac9288ac18274e8d8177a6f197f0ee006130", "url": "https://github.com/NVIDIA/gdrcopy/archive/0f7366e73b019e7facf907381f6b0b2f5a1576e4.tar.gz", "version": "2.5.1-1" }, "__hpc_hpcx_info": { "name": "hpcx", "sha256": "92f746dd8cf293cf5b3955a0addd92e162dd012e1f8f728983a85c6c134e33b0", "url": "https://content.mellanox.com/hpc/hpc-x/v2.24.1_cuda12/hpcx-v2.24.1-gcc-inbox-redhat9-cuda12-x86_64.tbz", "version": "2.24.1" }, "__hpc_microsoft_prod_repo": { "baseurl": "https://packages.microsoft.com/rhel/9/prod/", "description": "Microsoft Production repository", "key": "https://packages.microsoft.com/keys/microsoft.asc", "name": "microsoft-prod" }, "__hpc_moneo_info": { "name": "moneo", "sha256": "bab588b37f9a7d03fff82ff22d8a24c18a64e18eb2dad31f447a67b6fb76bd4c", "url": "https://github.com/Azure/Moneo/archive/refs/tags/v0.3.4.tar.gz", "version": "0.3.4" }, "__hpc_nvidia_cuda_repo": { "baseurl": "https://developer.download.nvidia.com/compute/cuda/repos/rhel9/x86_64", "description": "NVIDIA CUDA repository", "key": "https://developer.download.nvidia.com/compute/cuda/repos/rhel9/x86_64/D42D0685.pub", "name": "nvidia-cuda" }, "__hpc_nvidia_driver_module": "nvidia-driver:575-dkms", "__hpc_nvidia_nccl_packages": [ "libnccl-2.27.5-1+cuda12.9", "libnccl-devel-2.27.5-1+cuda12.9" ], "__hpc_openmpi_info": { "name": "openmpi", "sha256": "53131e1a57e7270f645707f8b0b65ba56048f5b5ac3f68faabed3eb0d710e449", "url": "https://download.open-mpi.org/release/open-mpi/v5.0/openmpi-5.0.8.tar.bz2", "version": "5.0.8" }, "__hpc_pmix_info": { "name": "pmix", "sha256": "6b11f4fd5c9d7f8e55fc6ebdee9af04b839f44d06044e58cea38c87c168784b3", "url": "https://github.com/openpmix/openpmix/releases/download/v4.2.9/pmix-4.2.9.tar.bz2", "version": "4.2.9" }, "__hpc_rhel_epel_repo": { "description": "RHEL EPEL repository", "key": "https://dl.fedoraproject.org/pub/epel/RPM-GPG-KEY-EPEL-9", "name": "RHEL EPEL repository", "rpm": "https://dl.fedoraproject.org/pub/epel/epel-release-latest-9.noarch.rpm" }, "__hpc_rhui_azure_rhel_9_eus_repo": { "baseurl": "https://rhui4-1.microsoft.com/pulp/repos/unprotected/microsoft-azure-rhel9-eus", "description": "Microsoft Azure RPMs for Red Hat Enterprise Linux 9 EUS", "key": "file:///etc/pki/rpm-gpg/RPM-GPG-KEY-microsoft-azure-release", "name": "rhui-microsoft-azure-rhel9-eus" } }, "ansible_included_var_files": [ "/tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_9.yml" } TASK [fedora.linux_system_roles.hpc : Fail on unsupported architectures] ******* task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:6 Friday 30 January 2026 16:05:22 -0500 (0:00:00.045) 0:01:56.995 ******** skipping: [managed-node1] => { "changed": false, "false_condition": 
"ansible_facts['architecture'] != 'x86_64'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Fail if role installs openmpi without cuda toolkit] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:14 Friday 30 January 2026 16:05:22 -0500 (0:00:00.015) 0:01:57.010 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "hpc_build_openmpi_w_nvidia_gpu_support", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Deploy GPG keys for repositories] ******** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:27 Friday 30 January 2026 16:05:22 -0500 (0:00:00.013) 0:01:57.024 ******** ok: [managed-node1] => (item={'name': 'RHEL EPEL repository', 'description': 'RHEL EPEL repository', 'key': 'https://dl.fedoraproject.org/pub/epel/RPM-GPG-KEY-EPEL-9', 'rpm': 'https://dl.fedoraproject.org/pub/epel/epel-release-latest-9.noarch.rpm'}) => { "ansible_loop_var": "item", "changed": false, "item": { "description": "RHEL EPEL repository", "key": "https://dl.fedoraproject.org/pub/epel/RPM-GPG-KEY-EPEL-9", "name": "RHEL EPEL repository", "rpm": "https://dl.fedoraproject.org/pub/epel/epel-release-latest-9.noarch.rpm" } } ok: [managed-node1] => (item={'name': 'nvidia-cuda', 'description': 'NVIDIA CUDA repository', 'key': 'https://developer.download.nvidia.com/compute/cuda/repos/rhel9/x86_64/D42D0685.pub', 'baseurl': 'https://developer.download.nvidia.com/compute/cuda/repos/rhel9/x86_64'}) => { "ansible_loop_var": "item", "changed": false, "item": { "baseurl": "https://developer.download.nvidia.com/compute/cuda/repos/rhel9/x86_64", "description": "NVIDIA CUDA repository", "key": "https://developer.download.nvidia.com/compute/cuda/repos/rhel9/x86_64/D42D0685.pub", "name": "nvidia-cuda" } } ok: [managed-node1] => (item={'name': 'microsoft-prod', 'description': 'Microsoft Production repository', 'key': 'https://packages.microsoft.com/keys/microsoft.asc', 'baseurl': 'https://packages.microsoft.com/rhel/9/prod/'}) => { "ansible_loop_var": "item", "changed": false, "item": { "baseurl": "https://packages.microsoft.com/rhel/9/prod/", "description": "Microsoft Production repository", "key": "https://packages.microsoft.com/keys/microsoft.asc", "name": "microsoft-prod" } } TASK [fedora.linux_system_roles.hpc : Install EPEL release package] ************ task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:37 Friday 30 January 2026 16:05:24 -0500 (0:00:01.599) 0:01:58.624 ******** ok: [managed-node1] => { "changed": false, "rc": 0, "results": [ "Installed /root/.ansible/tmp/ansible-tmp-1769807124.2193232-10967-201420544269031/epel-release-latest-9.noarchzp98efsb.rpm" ] } MSG: Nothing to do TASK [fedora.linux_system_roles.hpc : Configure repositories] ****************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:43 Friday 30 January 2026 16:05:25 -0500 (0:00:01.388) 0:02:00.012 ******** redirecting (type: action) ansible.builtin.yum to ansible.builtin.dnf ok: [managed-node1] => (item={'name': 'nvidia-cuda', 'description': 'NVIDIA CUDA repository', 'key': 'https://developer.download.nvidia.com/compute/cuda/repos/rhel9/x86_64/D42D0685.pub', 'baseurl': 'https://developer.download.nvidia.com/compute/cuda/repos/rhel9/x86_64'}) => { "ansible_loop_var": "item", "changed": false, "item": { "baseurl": 
"https://developer.download.nvidia.com/compute/cuda/repos/rhel9/x86_64", "description": "NVIDIA CUDA repository", "key": "https://developer.download.nvidia.com/compute/cuda/repos/rhel9/x86_64/D42D0685.pub", "name": "nvidia-cuda" }, "repo": "nvidia-cuda", "state": "present" } redirecting (type: action) ansible.builtin.yum to ansible.builtin.dnf ok: [managed-node1] => (item={'name': 'microsoft-prod', 'description': 'Microsoft Production repository', 'key': 'https://packages.microsoft.com/keys/microsoft.asc', 'baseurl': 'https://packages.microsoft.com/rhel/9/prod/'}) => { "ansible_loop_var": "item", "changed": false, "item": { "baseurl": "https://packages.microsoft.com/rhel/9/prod/", "description": "Microsoft Production repository", "key": "https://packages.microsoft.com/keys/microsoft.asc", "name": "microsoft-prod" }, "repo": "microsoft-prod", "state": "present" } TASK [fedora.linux_system_roles.hpc : Get list of installed repositories] ****** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:56 Friday 30 January 2026 16:05:26 -0500 (0:00:00.789) 0:02:00.802 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "hpc_enable_eus_repo", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Ensure that the non-EUS RHUI Azure repository is not installed] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:61 Friday 30 January 2026 16:05:26 -0500 (0:00:00.013) 0:02:00.815 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "hpc_enable_eus_repo", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Create a temp file for the EUS repository configuration] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:72 Friday 30 January 2026 16:05:26 -0500 (0:00:00.014) 0:02:00.830 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "hpc_enable_eus_repo", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Generate the repository configuration template] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:79 Friday 30 January 2026 16:05:26 -0500 (0:00:00.013) 0:02:00.844 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "hpc_enable_eus_repo", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Add EUS repository] ********************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:87 Friday 30 January 2026 16:05:26 -0500 (0:00:00.012) 0:02:00.857 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "hpc_enable_eus_repo", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Lock the RHEL minor release to the current minor release] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:94 Friday 30 January 2026 16:05:26 -0500 (0:00:00.013) 0:02:00.870 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "hpc_enable_eus_repo", "skip_reason": "Conditional result was False" } TASK [Configure firewall to use trusted zone as default] *********************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:102 Friday 30 January 2026 
16:05:26 -0500 (0:00:00.013) 0:02:00.883 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "hpc_manage_firewall", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Validate storage size formats] *********** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:114 Friday 30 January 2026 16:05:26 -0500 (0:00:00.012) 0:02:00.896 ******** ok: [managed-node1] => { "changed": false } MSG: All assertions passed TASK [fedora.linux_system_roles.hpc : Install lvm2 to get lvs command] ********* task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:124 Friday 30 January 2026 16:05:26 -0500 (0:00:00.022) 0:02:00.919 ******** ok: [managed-node1] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.hpc : Check if rootlv exists] ****************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:130 Friday 30 January 2026 16:05:27 -0500 (0:00:01.188) 0:02:02.108 ******** ok: [managed-node1] => { "changed": false, "stat": { "atime": 1769807121.619241, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1769807117.0992353, "dev": 6, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 657, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": true, "isreg": false, "issock": false, "isuid": false, "lnk_source": "/dev/dm-1", "lnk_target": "../dm-1", "mimetype": "inode/symlink", "mode": "0777", "mtime": 1769807117.0992353, "nlink": 1, "path": "/dev/mapper/rootvg-rootlv", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 7, "uid": 0, "version": null, "wgrp": true, "woth": true, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.hpc : Check if usrlv exists] ******************* task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:135 Friday 30 January 2026 16:05:27 -0500 (0:00:00.354) 0:02:02.462 ******** ok: [managed-node1] => { "changed": false, "stat": { "atime": 1769807121.629241, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1769807116.1092339, "dev": 6, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 607, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": true, "isreg": false, "issock": false, "isuid": false, "lnk_source": "/dev/dm-0", "lnk_target": "../dm-0", "mimetype": "inode/symlink", "mode": "0777", "mtime": 1769807116.1092339, "nlink": 1, "path": "/dev/mapper/rootvg-usrlv", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 7, "uid": 0, "version": null, "wgrp": true, "woth": true, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.hpc : Check if varlv exists] ******************* task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:140 Friday 30 January 2026 16:05:28 -0500 (0:00:00.359) 0:02:02.822 ******** ok: [managed-node1] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.hpc : Get current LV size of rootlv] *********** task path: 
/tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:145 Friday 30 January 2026 16:05:28 -0500 (0:00:00.350) 0:02:03.172 ******** ok: [managed-node1] => { "changed": false, "cmd": [ "lvs", "--noheadings", "--units", "g", "--nosuffix", "-o", "lv_size", "/dev/mapper/rootvg-rootlv" ], "delta": "0:00:00.029638", "end": "2026-01-30 16:05:29.108957", "rc": 0, "start": "2026-01-30 16:05:29.079319" } STDOUT: 2.00 TASK [fedora.linux_system_roles.hpc : Get current LV size of usrlv] ************ task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:151 Friday 30 January 2026 16:05:29 -0500 (0:00:00.455) 0:02:03.628 ******** ok: [managed-node1] => { "changed": false, "cmd": [ "lvs", "--noheadings", "--units", "g", "--nosuffix", "-o", "lv_size", "/dev/mapper/rootvg-usrlv" ], "delta": "0:00:00.030077", "end": "2026-01-30 16:05:29.490975", "rc": 0, "start": "2026-01-30 16:05:29.460898" } STDOUT: 1.00 TASK [fedora.linux_system_roles.hpc : Get current LV size of varlv] ************ task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:157 Friday 30 January 2026 16:05:29 -0500 (0:00:00.384) 0:02:04.012 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "__hpc_varlv_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Initialize volumes list] ***************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:163 Friday 30 January 2026 16:05:29 -0500 (0:00:00.015) 0:02:04.027 ******** ok: [managed-node1] => { "ansible_facts": { "__hpc_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.hpc : Add rootlv if exists and needs expansion] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:167 Friday 30 January 2026 16:05:29 -0500 (0:00:00.017) 0:02:04.044 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "size_expected > size_current", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Add usrlv if exists and needs expansion] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:181 Friday 30 January 2026 16:05:29 -0500 (0:00:00.038) 0:02:04.083 ******** ok: [managed-node1] => { "ansible_facts": { "__hpc_volumes": [ { "mount_point": "/hpc-test2", "name": "usrlv", "size": "2G" } ] }, "changed": false } TASK [fedora.linux_system_roles.hpc : Add varlv if exists and needs expansion] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:195 Friday 30 January 2026 16:05:29 -0500 (0:00:00.044) 0:02:04.128 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "__hpc_varlv_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [Configure storage] ******************************************************* task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:209 Friday 30 January 2026 16:05:29 -0500 (0:00:00.014) 0:02:04.143 ******** included: fedora.linux_system_roles.storage for managed-node1 TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Friday 30 January 2026 16:05:29 -0500 (0:00:00.048) 
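The lvs calls above report the current LV sizes (2.00 G for rootlv, 1.00 G for usrlv), and only usrlv is then queued for expansion to 2G because the requested size exceeds the current one. A sketch of that decision under those numbers; the variable names here are illustrative, not the role's:

- name: Sketch - decide whether a logical volume needs expansion
  hosts: managed-node1
  vars:
    usrlv_wanted_gib: 2        # illustrative; the role derives this from its own size variables
  tasks:
    - name: Get current LV size of usrlv in GiB
      ansible.builtin.command:
        argv: [lvs, --noheadings, --units, g, --nosuffix, -o, lv_size, /dev/mapper/rootvg-usrlv]
      register: __usrlv_size
      changed_when: false

    - name: Queue usrlv for expansion only when it is smaller than requested
      ansible.builtin.set_fact:
        __volumes: "{{ __volumes | d([]) + [{'name': 'usrlv', 'size': usrlv_wanted_gib ~ 'G', 'mount_point': '/hpc-test2'}] }}"
      when: usrlv_wanted_gib | float > __usrlv_size.stdout | trim | float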
0:02:04.192 ******** included: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node1 TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Friday 30 January 2026 16:05:29 -0500 (0:00:00.022) 0:02:04.215 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Friday 30 January 2026 16:05:29 -0500 (0:00:00.039) 0:02:04.254 ******** skipping: [managed-node1] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node1] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node1] => (item=CentOS_9.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs", "stratisd", "stratis-cli", "{{ 'libblockdev-s390' if ansible_facts['architecture'] == 's390x' else 'libblockdev' }}" ] }, "ansible_included_var_files": [ "/tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_9.yml" } ok: [managed-node1] => (item=CentOS_9.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs", "stratisd", "stratis-cli", "{{ 'libblockdev-s390' if ansible_facts['architecture'] == 's390x' else 'libblockdev' }}" ] }, "ansible_included_var_files": [ "/tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_9.yml" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Friday 30 January 2026 16:05:29 -0500 (0:00:00.051) 0:02:04.306 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "not __storage_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Friday 30 January 2026 16:05:29 -0500 (0:00:00.019) 0:02:04.326 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "not __storage_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Friday 30 January 2026 16:05:29 -0500 
(0:00:00.019) 0:02:04.345 ******** ok: [managed-node1] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Friday 30 January 2026 16:05:29 -0500 (0:00:00.017) 0:02:04.363 ******** ok: [managed-node1] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Friday 30 January 2026 16:05:29 -0500 (0:00:00.017) 0:02:04.380 ******** redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount included: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node1 TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 Friday 30 January 2026 16:05:29 -0500 (0:00:00.040) 0:02:04.422 ******** ok: [managed-node1] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Friday 30 January 2026 16:05:31 -0500 (0:00:01.208) 0:02:05.631 ******** ok: [managed-node1] => { "storage_pools | d([])": [ { "grow_to_fill": true, "name": "rootvg", "volumes": [ { "mount_point": "/hpc-test2", "name": "usrlv", "size": "2G" } ] } ] } TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14 Friday 30 January 2026 16:05:31 -0500 (0:00:00.041) 0:02:05.672 ******** ok: [managed-node1] => { "storage_volumes | d([])": [] } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 Friday 30 January 2026 16:05:31 -0500 (0:00:00.062) 0:02:05.735 ******** ok: [managed-node1] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [ "lvm2" ], "pools": [], "volumes": [] } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:32 Friday 30 January 2026 16:05:32 -0500 (0:00:01.284) 0:02:07.019 ******** included: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml for managed-node1 TASK [fedora.linux_system_roles.storage : Check if the COPR support packages should be installed] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:2 Friday 30 January 2026 16:05:32 -0500 (0:00:00.051) 0:02:07.071 ******** skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Make sure COPR support packages are present] *** task path: 
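Unlike the first invocation that created the volumes, the "Show storage_pools" output above is the second storage call, driven by the expansion list computed earlier: only usrlv (now 2G) is passed back, with grow_to_fill set on rootvg. A minimal sketch of that hand-off, assuming the documented include_role interface and the pool exactly as printed above:

- name: Sketch - second storage invocation, growing usrlv in place
  hosts: managed-node1
  tasks:
    - name: Expand only the volumes that need it
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.storage
      vars:
        storage_pools:
          - name: rootvg
            grow_to_fill: true          # carried over verbatim from the pool shown above
            volumes:
              - name: usrlv
                size: 2G
                mount_point: /hpc-test2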
/tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:13 Friday 30 January 2026 16:05:32 -0500 (0:00:00.032) 0:02:07.103 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "install_copr | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable COPRs] ************************ task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:19 Friday 30 January 2026 16:05:32 -0500 (0:00:00.034) 0:02:07.138 ******** skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:38 Friday 30 January 2026 16:05:32 -0500 (0:00:00.030) 0:02:07.169 ******** ok: [managed-node1] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:52 Friday 30 January 2026 16:05:33 -0500 (0:00:01.177) 0:02:08.347 ******** ok: [managed-node1] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "apt-daily.service": { "name": "apt-daily.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": 
"cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": 
"emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fcoe.service": { "name": "fcoe.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "iscsi-shutdown.service": { "name": "iscsi-shutdown.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsi.service": { "name": "iscsi.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsid.service": { "name": "iscsid.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-activation-early.service": { "name": "lvm2-activation-early.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", 
"status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ndctl-monitor.service": { "name": "ndctl-monitor.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, 
"plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "raid-check.service": { "name": "raid-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "rbdmap.service": { "name": "rbdmap.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sku_customisation.service": { "name": "sku_customisation.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", 
"status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "stratis-fstab-setup@.service": { "name": "stratis-fstab-setup@.service", "source": "systemd", "state": "unknown", "status": "static" }, "stratisd-min-postinitrd.service": { "name": "stratisd-min-postinitrd.service", "source": "systemd", "state": "inactive", "status": "static" }, "stratisd.service": { "name": "stratisd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": 
"stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles.service": { "name": "systemd-tmpfiles.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": 
"systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "yppasswdd.service": { "name": "yppasswdd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypserv.service": { "name": "ypserv.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypxfrd.service": { "name": "ypxfrd.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] ***** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:58 Friday 30 January 2026 16:05:35 -0500 (0:00:01.893) 0:02:10.240 ******** ok: [managed-node1] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:64 Friday 30 January 2026 16:05:35 -0500 (0:00:00.050) 0:02:10.290 ******** skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 Friday 30 January 2026 16:05:35 -0500 (0:00:00.013) 0:02:10.303 ******** changed: [managed-node1] => { "actions": [ { "action": "resize device", "device": "/dev/mapper/rootvg-usrlv", "fs_type": null }, { "action": "resize format", "device": "/dev/mapper/rootvg-usrlv", "fs_type": "xfs" } ], "changed": true, "crypts": [], "leaves": [ "/dev/mapper/rootvg-rootlv", "/dev/mapper/rootvg-usrlv", "/dev/xvda1" ], "mounts": [ { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/hpc-test2", "src": "/dev/mapper/rootvg-usrlv", "state": "mounted" } ], "packages": [ "lvm2", "xfsprogs" 
], "pools": [ { "disks": [ "sda", "sdb" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": true, "name": "rootvg", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/rootvg-usrlv", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/rootvg-usrlv", "_raw_device": "/dev/mapper/rootvg-usrlv", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/hpc-test2", "mount_user": null, "name": "usrlv", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "2G", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:85 Friday 30 January 2026 16:05:37 -0500 (0:00:01.633) 0:02:11.937 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "storage_udevadm_trigger | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ****** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:92 Friday 30 January 2026 16:05:37 -0500 (0:00:00.034) 0:02:11.972 ******** ok: [managed-node1] => { "changed": false, "stat": { "atime": 1769807119.8532386, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "4d88452b6790937bcc1e249bb6e9d737c35ff46b", "ctime": 1769807119.8502386, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 494929982, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1769807119.8502386, "nlink": 1, "path": "/etc/fstab", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1515, "uid": 0, "version": "872395520", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:97 Friday 30 January 2026 16:05:37 -0500 (0:00:00.368) 0:02:12.340 ******** ok: [managed-node1] 
=> { "backup": "", "changed": false } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:115 Friday 30 January 2026 16:05:38 -0500 (0:00:00.360) 0:02:12.701 ******** skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:121 Friday 30 January 2026 16:05:38 -0500 (0:00:00.013) 0:02:12.714 ******** ok: [managed-node1] => { "blivet_output": { "actions": [ { "action": "resize device", "device": "/dev/mapper/rootvg-usrlv", "fs_type": null }, { "action": "resize format", "device": "/dev/mapper/rootvg-usrlv", "fs_type": "xfs" } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/mapper/rootvg-rootlv", "/dev/mapper/rootvg-usrlv", "/dev/xvda1" ], "mounts": [ { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/hpc-test2", "src": "/dev/mapper/rootvg-usrlv", "state": "mounted" } ], "packages": [ "lvm2", "xfsprogs" ], "pools": [ { "disks": [ "sda", "sdb" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": true, "name": "rootvg", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/rootvg-usrlv", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/rootvg-usrlv", "_raw_device": "/dev/mapper/rootvg-usrlv", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/hpc-test2", "mount_user": null, "name": "usrlv", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "2G", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:130 Friday 30 January 2026 16:05:38 -0500 (0:00:00.023) 0:02:12.737 ******** ok: [managed-node1] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda", "sdb" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, 
"encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": true, "name": "rootvg", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "present", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/rootvg-usrlv", "_kernel_device": "/dev/dm-0", "_mount_id": "/dev/mapper/rootvg-usrlv", "_raw_device": "/dev/mapper/rootvg-usrlv", "_raw_kernel_device": "/dev/dm-0", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/hpc-test2", "mount_user": null, "name": "usrlv", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "2G", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:134 Friday 30 January 2026 16:05:38 -0500 (0:00:00.021) 0:02:12.759 ******** ok: [managed-node1] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] ************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:150 Friday 30 January 2026 16:05:38 -0500 (0:00:00.018) 0:02:12.778 ******** skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:161 Friday 30 January 2026 16:05:38 -0500 (0:00:00.039) 0:02:12.818 ******** ok: [managed-node1] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Set up new/current mounts] *********** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:166 Friday 30 January 2026 16:05:39 -0500 (0:00:00.699) 0:02:13.517 ******** redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount ok: [managed-node1] => (item={'src': '/dev/mapper/rootvg-usrlv', 'path': '/hpc-test2', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => { "ansible_loop_var": "mount_info", "backup_file": "", "boot": "yes", "changed": false, "dump": "0", "fstab": "/etc/fstab", "fstype": "xfs", "mount_info": { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/hpc-test2", "src": "/dev/mapper/rootvg-usrlv", "state": "mounted" }, "name": "/hpc-test2", 
"opts": "defaults", "passno": "0", "src": "/dev/mapper/rootvg-usrlv" } TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:177 Friday 30 January 2026 16:05:39 -0500 (0:00:00.387) 0:02:13.904 ******** skipping: [managed-node1] => (item={'src': '/dev/mapper/rootvg-usrlv', 'path': '/hpc-test2', 'fstype': 'xfs', 'opts': 'defaults', 'dump': 0, 'passno': 0, 'state': 'mounted', 'owner': None, 'group': None, 'mode': None}) => { "ansible_loop_var": "mount_info", "changed": false, "false_condition": "mount_info['owner'] != none or mount_info['group'] != none or mount_info['mode'] != none", "mount_info": { "dump": 0, "fstype": "xfs", "group": null, "mode": null, "opts": "defaults", "owner": null, "passno": 0, "path": "/hpc-test2", "src": "/dev/mapper/rootvg-usrlv", "state": "mounted" }, "skip_reason": "Conditional result was False" } skipping: [managed-node1] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:189 Friday 30 January 2026 16:05:39 -0500 (0:00:00.042) 0:02:13.947 ******** ok: [managed-node1] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:197 Friday 30 January 2026 16:05:40 -0500 (0:00:00.682) 0:02:14.630 ******** ok: [managed-node1] => { "changed": false, "stat": { "atime": 1769806764.355625, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1767624696.987, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 4194436, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1767624397.527, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "275126202", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:202 Friday 30 January 2026 16:05:40 -0500 (0:00:00.358) 0:02:14.988 ******** skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:224 Friday 30 January 2026 16:05:40 -0500 (0:00:00.013) 0:02:15.001 ******** ok: [managed-node1] TASK [fedora.linux_system_roles.hpc : Force install kernel version] ************ task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:219 Friday 30 January 2026 16:05:41 -0500 (0:00:00.878) 0:02:15.880 ******** skipping: [managed-node1] => { "changed": false, "false_condition": 
"__hpc_force_kernel_version is not none", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Update kernel] *************************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:227 Friday 30 January 2026 16:05:41 -0500 (0:00:00.018) 0:02:15.898 ******** ok: [managed-node1] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.hpc : Get package facts] *********************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:236 Friday 30 January 2026 16:05:42 -0500 (0:00:01.184) 0:02:17.083 ******** ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.hpc : Install kernel-devel and kernel-headers packages for all kernels] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:241 Friday 30 January 2026 16:05:43 -0500 (0:00:01.139) 0:02:18.223 ******** ok: [managed-node1] => (item={'name': 'kernel', 'version': '5.14.0', 'release': '654.el9', 'epoch': None, 'arch': 'x86_64', 'source': 'rpm'}) => { "ansible_loop_var": "item", "changed": false, "item": { "arch": "x86_64", "epoch": null, "name": "kernel", "release": "654.el9", "source": "rpm", "version": "5.14.0" }, "rc": 0, "results": [] } MSG: Nothing to do ok: [managed-node1] => (item={'name': 'kernel', 'version': '5.14.0', 'release': '655.el9', 'epoch': None, 'arch': 'x86_64', 'source': 'rpm'}) => { "ansible_loop_var": "item", "changed": false, "item": { "arch": "x86_64", "epoch": null, "name": "kernel", "release": "655.el9", "source": "rpm", "version": "5.14.0" }, "rc": 0, "results": [] } MSG: Nothing to do ok: [managed-node1] => (item={'name': 'kernel', 'version': '5.14.0', 'release': '665.el9', 'epoch': None, 'arch': 'x86_64', 'source': 'rpm'}) => { "ansible_loop_var": "item", "changed": false, "item": { "arch": "x86_64", "epoch": null, "name": "kernel", "release": "665.el9", "source": "rpm", "version": "5.14.0" }, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.hpc : Ensure that dnf-command(versionlock) is installed] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:252 Friday 30 January 2026 16:05:47 -0500 (0:00:03.512) 0:02:21.736 ******** ok: [managed-node1] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.hpc : Check if kernel versionlock entries exist] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:257 Friday 30 January 2026 16:05:48 -0500 (0:00:01.152) 0:02:22.888 ******** ok: [managed-node1] => { "changed": false, "stat": { "atime": 1769807007.8164768, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1769807004.5655146, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 508293, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0644", "mtime": 1769807004.5655146, "nlink": 1, "path": "/etc/dnf/plugins/versionlock.list", "pw_name": "root", "readable": true, "rgrp": true, "roth": 
true, "rusr": true, "size": 0, "uid": 0, "version": "711100237", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.hpc : Prevent installation of all kernel packages of a different version] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:266 Friday 30 January 2026 16:05:48 -0500 (0:00:00.358) 0:02:23.247 ******** changed: [managed-node1] => (item=kernel) => { "ansible_loop_var": "item", "changed": true, "cmd": [ "dnf", "versionlock", "add", "kernel" ], "delta": "0:00:00.903454", "end": "2026-01-30 16:05:49.976400", "item": "kernel", "rc": 0, "start": "2026-01-30 16:05:49.072946" } STDOUT: Last metadata expiration check: 0:02:27 ago on Fri 30 Jan 2026 04:03:22 PM EST. Adding versionlock on: kernel-0:5.14.0-655.el9.* Adding versionlock on: kernel-0:5.14.0-654.el9.* Adding versionlock on: kernel-0:5.14.0-665.el9.* changed: [managed-node1] => (item=kernel-core) => { "ansible_loop_var": "item", "changed": true, "cmd": [ "dnf", "versionlock", "add", "kernel-core" ], "delta": "0:00:00.890759", "end": "2026-01-30 16:05:51.206917", "item": "kernel-core", "rc": 0, "start": "2026-01-30 16:05:50.316158" } STDOUT: Last metadata expiration check: 0:02:28 ago on Fri 30 Jan 2026 04:03:22 PM EST. Adding versionlock on: kernel-core-0:5.14.0-655.el9.* Adding versionlock on: kernel-core-0:5.14.0-665.el9.* Adding versionlock on: kernel-core-0:5.14.0-654.el9.* changed: [managed-node1] => (item=kernel-modules) => { "ansible_loop_var": "item", "changed": true, "cmd": [ "dnf", "versionlock", "add", "kernel-modules" ], "delta": "0:00:00.922388", "end": "2026-01-30 16:05:52.455873", "item": "kernel-modules", "rc": 0, "start": "2026-01-30 16:05:51.533485" } STDOUT: Last metadata expiration check: 0:02:30 ago on Fri 30 Jan 2026 04:03:22 PM EST. Adding versionlock on: kernel-modules-0:5.14.0-654.el9.* Adding versionlock on: kernel-modules-0:5.14.0-655.el9.* Adding versionlock on: kernel-modules-0:5.14.0-665.el9.* changed: [managed-node1] => (item=kernel-modules-extra) => { "ansible_loop_var": "item", "changed": true, "cmd": [ "dnf", "versionlock", "add", "kernel-modules-extra" ], "delta": "0:00:00.886612", "end": "2026-01-30 16:05:53.671133", "item": "kernel-modules-extra", "rc": 0, "start": "2026-01-30 16:05:52.784521" } STDOUT: Last metadata expiration check: 0:02:31 ago on Fri 30 Jan 2026 04:03:22 PM EST. Adding versionlock on: kernel-modules-extra-0:5.14.0-661.el9.* Adding versionlock on: kernel-modules-extra-0:5.14.0-658.el9.* Adding versionlock on: kernel-modules-extra-0:5.14.0-665.el9.* Adding versionlock on: kernel-modules-extra-0:5.14.0-654.el9.* Adding versionlock on: kernel-modules-extra-0:5.14.0-655.el9.* changed: [managed-node1] => (item=kernel-devel) => { "ansible_loop_var": "item", "changed": true, "cmd": [ "dnf", "versionlock", "add", "kernel-devel" ], "delta": "0:00:00.916204", "end": "2026-01-30 16:05:54.917414", "item": "kernel-devel", "rc": 0, "start": "2026-01-30 16:05:54.001210" } STDOUT: Last metadata expiration check: 0:02:32 ago on Fri 30 Jan 2026 04:03:22 PM EST. 
Adding versionlock on: kernel-devel-0:5.14.0-655.el9.* Adding versionlock on: kernel-devel-0:5.14.0-654.el9.* Adding versionlock on: kernel-devel-0:5.14.0-665.el9.* changed: [managed-node1] => (item=kernel-headers) => { "ansible_loop_var": "item", "changed": true, "cmd": [ "dnf", "versionlock", "add", "kernel-headers" ], "delta": "0:00:00.897060", "end": "2026-01-30 16:05:56.146333", "item": "kernel-headers", "rc": 0, "start": "2026-01-30 16:05:55.249273" } STDOUT: Last metadata expiration check: 0:02:33 ago on Fri 30 Jan 2026 04:03:22 PM EST. Adding versionlock on: kernel-headers-0:5.14.0-665.el9.* TASK [fedora.linux_system_roles.hpc : Update all packages to bring system to the latest state] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:274 Friday 30 January 2026 16:05:56 -0500 (0:00:07.426) 0:02:30.674 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "hpc_update_all_packages", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Get list of dnf modules] ***************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:285 Friday 30 January 2026 16:05:56 -0500 (0:00:00.018) 0:02:30.692 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "ansible_facts[\"system_vendor\"] == \"Microsoft Corporation\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Reset nvidia-driver module if it is enabled of different version] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:290 Friday 30 January 2026 16:05:56 -0500 (0:00:00.028) 0:02:30.721 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "ansible_facts[\"system_vendor\"] == \"Microsoft Corporation\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Enable NVIDIA driver module] ************* task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:295 Friday 30 January 2026 16:05:56 -0500 (0:00:00.027) 0:02:30.748 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "ansible_facts[\"system_vendor\"] == \"Microsoft Corporation\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Install dkms] **************************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:304 Friday 30 January 2026 16:05:56 -0500 (0:00:00.023) 0:02:30.771 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "ansible_facts[\"system_vendor\"] == \"Microsoft Corporation\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Install NVIDIA driver] ******************* task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:312 Friday 30 January 2026 16:05:56 -0500 (0:00:00.023) 0:02:30.795 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "ansible_facts[\"system_vendor\"] == \"Microsoft Corporation\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Restart dkms service to make it build nvidia drivers for all kernels] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:322 Friday 30 January 2026 16:05:56 -0500 (0:00:00.027) 0:02:30.823 
******** skipping: [managed-node1] => { "changed": false, "false_condition": "ansible_facts[\"system_vendor\"] == \"Microsoft Corporation\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Install CUDA driver] ********************* task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:333 Friday 30 January 2026 16:05:56 -0500 (0:00:00.023) 0:02:30.846 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "ansible_facts[\"system_vendor\"] == \"Microsoft Corporation\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Enable nvidia-persistenced.service] ****** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:341 Friday 30 January 2026 16:05:56 -0500 (0:00:00.024) 0:02:30.870 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "ansible_facts[\"system_vendor\"] == \"Microsoft Corporation\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Install CUDA Toolkit] ******************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:351 Friday 30 January 2026 16:05:56 -0500 (0:00:00.026) 0:02:30.897 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "hpc_install_cuda_toolkit", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Prevent update of CUDA Toolkit packages] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:360 Friday 30 January 2026 16:05:56 -0500 (0:00:00.017) 0:02:30.915 ******** skipping: [managed-node1] => (item=kernel) => { "ansible_loop_var": "item", "changed": false, "false_condition": "hpc_install_cuda_toolkit", "item": "kernel", "skip_reason": "Conditional result was False" } skipping: [managed-node1] => (item=kernel-core) => { "ansible_loop_var": "item", "changed": false, "false_condition": "hpc_install_cuda_toolkit", "item": "kernel-core", "skip_reason": "Conditional result was False" } skipping: [managed-node1] => (item=kernel-modules) => { "ansible_loop_var": "item", "changed": false, "false_condition": "hpc_install_cuda_toolkit", "item": "kernel-modules", "skip_reason": "Conditional result was False" } skipping: [managed-node1] => (item=kernel-modules-extra) => { "ansible_loop_var": "item", "changed": false, "false_condition": "hpc_install_cuda_toolkit", "item": "kernel-modules-extra", "skip_reason": "Conditional result was False" } skipping: [managed-node1] => (item=kernel-devel) => { "ansible_loop_var": "item", "changed": false, "false_condition": "hpc_install_cuda_toolkit", "item": "kernel-devel", "skip_reason": "Conditional result was False" } skipping: [managed-node1] => (item=kernel-headers) => { "ansible_loop_var": "item", "changed": false, "false_condition": "hpc_install_cuda_toolkit", "item": "kernel-headers", "skip_reason": "Conditional result was False" } skipping: [managed-node1] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.hpc : Install NVIDIA NCCL] ********************* task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:371 Friday 30 January 2026 16:05:56 -0500 (0:00:00.030) 0:02:30.946 ******** changed: [managed-node1] => { "attempts": 1, "changed": true, "rc": 0, "results": [ "Installed: libnccl-2.27.5-1+cuda12.9.x86_64", "Installed: 
libnccl-devel-2.27.5-1+cuda12.9.x86_64" ] } TASK [fedora.linux_system_roles.hpc : Prevent update of NVIDIA NCCL packages] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:380 Friday 30 January 2026 16:06:19 -0500 (0:00:23.271) 0:02:54.218 ******** changed: [managed-node1] => (item=libnccl-2.27.5-1+cuda12.9) => { "ansible_loop_var": "item", "changed": true, "cmd": [ "dnf", "versionlock", "add", "libnccl-2.27.5-1+cuda12.9" ], "delta": "0:00:00.926590", "end": "2026-01-30 16:06:20.977900", "item": "libnccl-2.27.5-1+cuda12.9", "rc": 0, "start": "2026-01-30 16:06:20.051310" } STDOUT: Last metadata expiration check: 0:02:58 ago on Fri 30 Jan 2026 04:03:22 PM EST. Adding versionlock on: libnccl-0:2.27.5-1+cuda12.9.* changed: [managed-node1] => (item=libnccl-devel-2.27.5-1+cuda12.9) => { "ansible_loop_var": "item", "changed": true, "cmd": [ "dnf", "versionlock", "add", "libnccl-devel-2.27.5-1+cuda12.9" ], "delta": "0:00:00.908779", "end": "2026-01-30 16:06:22.218676", "item": "libnccl-devel-2.27.5-1+cuda12.9", "rc": 0, "start": "2026-01-30 16:06:21.309897" } STDOUT: Last metadata expiration check: 0:02:59 ago on Fri 30 Jan 2026 04:03:22 PM EST. Adding versionlock on: libnccl-devel-0:2.27.5-1+cuda12.9.* TASK [fedora.linux_system_roles.hpc : Install NVIDIA Fabric Manager] *********** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:391 Friday 30 January 2026 16:06:22 -0500 (0:00:02.524) 0:02:56.742 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "hpc_install_nvidia_fabric_manager", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Ensure that Fabric Manager service is enabled] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:399 Friday 30 January 2026 16:06:22 -0500 (0:00:00.018) 0:02:56.761 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "hpc_install_nvidia_fabric_manager", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Install RDMA packages] ******************* task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:407 Friday 30 January 2026 16:06:22 -0500 (0:00:00.047) 0:02:56.808 ******** changed: [managed-node1] => { "attempts": 1, "changed": true, "rc": 0, "results": [ "Installed: ucx-1.18.1-1.el9.x86_64", "Installed: libibumad-61.0-2.el9.x86_64", "Installed: librdmacm-61.0-2.el9.x86_64", "Installed: rdma-core-61.0-2.el9.x86_64", "Installed: ucx-ib-1.18.1-1.el9.x86_64", "Installed: azure-vm-utils-0.7.0-1.el9.x86_64", "Installed: librdmacm-utils-61.0-2.el9.x86_64", "Installed: ucx-rdmacm-1.18.1-1.el9.x86_64", "Installed: libibverbs-61.0-2.el9.x86_64", "Installed: infiniband-diags-61.0-2.el9.x86_64", "Installed: pciutils-3.7.0-7.el9.x86_64", "Installed: libibverbs-utils-61.0-2.el9.x86_64", "Installed: rdma-core-devel-61.0-2.el9.x86_64", "Installed: pciutils-devel-3.7.0-7.el9.x86_64", "Installed: WALinuxAgent-2.14.0.1-3.el9.noarch", "Installed: perftest-25.04.0.0.84-1.el9.x86_64", "Installed: python3-pyasn1-0.4.8-6.el9.noarch", "Installed: WALinuxAgent-udev-2.14.0.1-3.el9.noarch" ] } TASK [fedora.linux_system_roles.hpc : Enable RDMA in waagent configuration] **** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:417 Friday 30 January 2026 16:06:26 -0500 (0:00:04.159) 0:03:00.968 ******** Notification 
for handler Restart waagent has been saved. changed: [managed-node1] => { "backup": "/etc/waagent.conf.88940.2026-01-30@16:06:26~", "changed": true } MSG: line added TASK [fedora.linux_system_roles.hpc : Install common OpenMPI packages] ********* task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:426 Friday 30 January 2026 16:06:27 -0500 (0:00:00.530) 0:03:01.498 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "hpc_install_system_openmpi or hpc_build_openmpi_w_nvidia_gpu_support", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Install system OpenMPI] ****************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:435 Friday 30 January 2026 16:06:27 -0500 (0:00:00.020) 0:03:01.518 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "hpc_install_system_openmpi", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Install build dependencies] ************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:447 Friday 30 January 2026 16:06:27 -0500 (0:00:00.018) 0:03:01.537 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "hpc_build_openmpi_w_nvidia_gpu_support", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Set __hpc_hpcx_path fact] **************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:457 Friday 30 January 2026 16:06:27 -0500 (0:00:00.017) 0:03:01.555 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "hpc_build_openmpi_w_nvidia_gpu_support", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Set facts for building HPC-X and OpenMPI] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:463 Friday 30 January 2026 16:06:27 -0500 (0:00:00.020) 0:03:01.575 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "hpc_build_openmpi_w_nvidia_gpu_support", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Get stat of pmix path] ******************* task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:473 Friday 30 January 2026 16:06:27 -0500 (0:00:00.018) 0:03:01.594 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "hpc_build_openmpi_w_nvidia_gpu_support", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Download PMIx] *************************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:481 Friday 30 January 2026 16:06:27 -0500 (0:00:00.018) 0:03:01.612 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "hpc_build_openmpi_w_nvidia_gpu_support", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Build PMIx] ****************************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:486 Friday 30 January 2026 16:06:27 -0500 (0:00:00.019) 0:03:01.632 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "hpc_build_openmpi_w_nvidia_gpu_support", "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.hpc : Ensure PMIx modulefile directory exists] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:499 Friday 30 January 2026 16:06:27 -0500 (0:00:00.018) 0:03:01.650 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "hpc_build_openmpi_w_nvidia_gpu_support", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Install PMIx modulefile] ***************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:508 Friday 30 January 2026 16:06:27 -0500 (0:00:00.018) 0:03:01.668 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "hpc_build_openmpi_w_nvidia_gpu_support", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Download GDRCopy] ************************ task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:527 Friday 30 January 2026 16:06:27 -0500 (0:00:00.020) 0:03:01.689 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "hpc_build_openmpi_w_nvidia_gpu_support", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Build GDRCopy RPM packages] ************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:532 Friday 30 January 2026 16:06:27 -0500 (0:00:00.018) 0:03:01.707 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "hpc_build_openmpi_w_nvidia_gpu_support", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Install GDRCopy packages from built RPMs] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:540 Friday 30 January 2026 16:06:27 -0500 (0:00:00.017) 0:03:01.725 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "hpc_build_openmpi_w_nvidia_gpu_support", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Remove extracted tarball] **************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:550 Friday 30 January 2026 16:06:27 -0500 (0:00:00.018) 0:03:01.744 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "hpc_build_openmpi_w_nvidia_gpu_support", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Get stat of hpcx-rebuild path] *********** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:556 Friday 30 January 2026 16:06:27 -0500 (0:00:00.018) 0:03:01.762 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "hpc_build_openmpi_w_nvidia_gpu_support", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Download HPC-X] ************************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:564 Friday 30 January 2026 16:06:27 -0500 (0:00:00.017) 0:03:01.779 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "hpc_build_openmpi_w_nvidia_gpu_support", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Ensure that pkgconfig files use hpcx_home={{ __hpc_hpcx_path }}] *** task path: 
/tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:571 Friday 30 January 2026 16:06:27 -0500 (0:00:00.020) 0:03:01.800 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "hpc_build_openmpi_w_nvidia_gpu_support", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Update hcoll pkgconfig file to add -locoms parameter] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:585 Friday 30 January 2026 16:06:27 -0500 (0:00:00.018) 0:03:01.818 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "hpc_build_openmpi_w_nvidia_gpu_support", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Copy HPC-X files to {{ __hpc_hpcx_path }}] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:592 Friday 30 January 2026 16:06:27 -0500 (0:00:00.019) 0:03:01.837 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "hpc_build_openmpi_w_nvidia_gpu_support", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Rebuild HPC-X with PMIx] ***************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:601 Friday 30 January 2026 16:06:27 -0500 (0:00:00.019) 0:03:01.857 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "hpc_build_openmpi_w_nvidia_gpu_support", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Copy ompi/tests to hpcx-rebuild in {{ __hpc_hpcx_path }}] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:614 Friday 30 January 2026 16:06:27 -0500 (0:00:00.018) 0:03:01.875 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "hpc_build_openmpi_w_nvidia_gpu_support", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Remove extracted tarball] **************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:623 Friday 30 January 2026 16:06:27 -0500 (0:00:00.017) 0:03:01.893 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "hpc_build_openmpi_w_nvidia_gpu_support", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Ensure MPI modulefile directory exists] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:629 Friday 30 January 2026 16:06:27 -0500 (0:00:00.019) 0:03:01.912 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "hpc_build_openmpi_w_nvidia_gpu_support", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Install NVidia HPCX OpemMPI modulefile] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:637 Friday 30 January 2026 16:06:27 -0500 (0:00:00.017) 0:03:01.929 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "hpc_build_openmpi_w_nvidia_gpu_support", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Install NVidia HPCX OpemMPI with PMIx 4.2.9] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:646 Friday 30 January 2026 16:06:27 -0500 (0:00:00.023) 
0:03:01.952 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "hpc_build_openmpi_w_nvidia_gpu_support", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Get stat of openmpi path] **************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:656 Friday 30 January 2026 16:06:27 -0500 (0:00:00.019) 0:03:01.972 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "hpc_build_openmpi_w_nvidia_gpu_support", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Download openmpi] ************************ task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:664 Friday 30 January 2026 16:06:27 -0500 (0:00:00.018) 0:03:01.990 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "hpc_build_openmpi_w_nvidia_gpu_support", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Build openmpi] *************************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:669 Friday 30 January 2026 16:06:27 -0500 (0:00:00.019) 0:03:02.010 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "hpc_build_openmpi_w_nvidia_gpu_support", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Remove extracted tarball] **************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:687 Friday 30 January 2026 16:06:27 -0500 (0:00:00.020) 0:03:02.031 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "hpc_build_openmpi_w_nvidia_gpu_support", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Install OpenMPI modulefile] ************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:693 Friday 30 January 2026 16:06:27 -0500 (0:00:00.018) 0:03:02.049 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "hpc_build_openmpi_w_nvidia_gpu_support", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Remove user memory limits to ensure applications aren't restricted] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:704 Friday 30 January 2026 16:06:27 -0500 (0:00:00.017) 0:03:02.067 ******** ok: [managed-node1] => { "changed": false, "checksum": "a592ca2fabb2feaeada16e146d204e0890b43eae", "dest": "/etc/security/limits.d/90-hpc-limits.conf", "gid": 0, "group": "root", "mode": "0644", "owner": "root", "path": "/etc/security/limits.d/90-hpc-limits.conf", "secontext": "system_u:object_r:etc_t:s0", "size": 333, "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.hpc : Add sysctl tuning configuration for HPC] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:712 Friday 30 January 2026 16:06:28 -0500 (0:00:00.991) 0:03:03.059 ******** ok: [managed-node1] => { "changed": false, "checksum": "730dba3d79bdaadebc65705f14efa9e43bd77aa8", "dest": "/etc/sysctl.d/90-hpc-sysctl.conf", "gid": 0, "group": "root", "mode": "0644", "owner": "root", "path": "/etc/sysctl.d/90-hpc-sysctl.conf", "secontext": "system_u:object_r:system_conf_t:s0", "size": 377, "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.hpc : 
Load sunrpc kernel module] *************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:721 Friday 30 January 2026 16:06:29 -0500 (0:00:00.632) 0:03:03.691 ******** ok: [managed-node1] => { "backup": "", "changed": false } TASK [fedora.linux_system_roles.hpc : Check if sunrpc module is loaded] ******** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:731 Friday 30 January 2026 16:06:29 -0500 (0:00:00.371) 0:03:04.063 ******** ok: [managed-node1] => { "changed": false, "cmd": [ "lsmod" ], "delta": "0:00:00.004794", "end": "2026-01-30 16:06:29.892844", "rc": 0, "start": "2026-01-30 16:06:29.888050" } STDOUT: Module Size Used by dm_mod 249856 7 sd_mod 90112 2 sg 53248 0 target_core_user 77824 0 uio 32768 1 target_core_user target_core_pscsi 32768 0 target_core_file 32768 4 target_core_iblock 28672 0 tcm_loop 40960 7 iscsi_target_mod 499712 1 target_core_mod 593920 18 tcm_loop,target_core_file,target_core_iblock,iscsi_target_mod,target_core_pscsi,target_core_user tls 159744 0 rfkill 40960 1 cirrus_qemu 16384 0 drm_client_lib 16384 1 cirrus_qemu drm_shmem_helper 36864 2 cirrus_qemu drm_kms_helper 270336 3 cirrus_qemu,drm_shmem_helper,drm_client_lib intel_rapl_msr 20480 0 intel_rapl_common 57344 1 intel_rapl_msr i2c_piix4 36864 0 rapl 24576 0 i2c_smbus 20480 1 i2c_piix4 pcspkr 12288 0 sunrpc 884736 1 fuse 212992 1 drm 864256 5 drm_kms_helper,cirrus_qemu,drm_shmem_helper,drm_client_lib xfs 3092480 3 libcrc32c 12288 1 xfs ata_generic 12288 0 crct10dif_pclmul 12288 1 crc32_pclmul 12288 0 ata_piix 45056 0 crc32c_intel 24576 1 libata 516096 2 ata_piix,ata_generic xen_netfront 53248 1 xen_blkfront 57344 2 ghash_clmulni_intel 16384 0 serio_raw 16384 0 TASK [fedora.linux_system_roles.hpc : Load sunrpc module if not loaded] ******** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:736 Friday 30 January 2026 16:06:29 -0500 (0:00:00.353) 0:03:04.417 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "'sunrpc' not in __hpc_loaded_modules.stdout", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Copy NFS readahead udev rules for Azure infrastructure] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:741 Friday 30 January 2026 16:06:29 -0500 (0:00:00.020) 0:03:04.437 ******** ok: [managed-node1] => { "changed": false, "checksum": "53e5cca42800593172de84f4a440b5e490c86445", "dest": "/etc/udev/rules.d/90-nfs-readahead.rules", "gid": 0, "group": "root", "mode": "0644", "owner": "root", "path": "/etc/udev/rules.d/90-nfs-readahead.rules", "secontext": "system_u:object_r:udev_rules_t:s0", "size": 220, "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.hpc : Create Azure HPC resource directories] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:750 Friday 30 January 2026 16:06:30 -0500 (0:00:00.624) 0:03:05.062 ******** ok: [managed-node1] => (item=/opt/hpc/azure) => { "ansible_loop_var": "item", "changed": false, "gid": 0, "group": "root", "item": "/opt/hpc/azure", "mode": "0755", "owner": "root", "path": "/opt/hpc/azure", "secontext": "unconfined_u:object_r:usr_t:s0", "size": 81, "state": "directory", "uid": 0 } ok: [managed-node1] => (item=/opt/hpc/azure/bin) => { "ansible_loop_var": "item", "changed": false, "gid": 0, "group": "root", "item": "/opt/hpc/azure/bin", 
"mode": "0755", "owner": "root", "path": "/opt/hpc/azure/bin", "secontext": "unconfined_u:object_r:usr_t:s0", "size": 77, "state": "directory", "uid": 0 } ok: [managed-node1] => (item=/opt/hpc/azure/tools) => { "ansible_loop_var": "item", "changed": false, "gid": 0, "group": "root", "item": "/opt/hpc/azure/tools", "mode": "0755", "owner": "root", "path": "/opt/hpc/azure/tools", "secontext": "unconfined_u:object_r:usr_t:s0", "size": 19, "state": "directory", "uid": 0 } ok: [managed-node1] => (item=/opt/hpc/azure/tests) => { "ansible_loop_var": "item", "changed": false, "gid": 0, "group": "root", "item": "/opt/hpc/azure/tests", "mode": "0755", "owner": "root", "path": "/opt/hpc/azure/tests", "secontext": "unconfined_u:object_r:usr_t:s0", "size": 31, "state": "directory", "uid": 0 } ok: [managed-node1] => (item=/var/hpc/azure) => { "ansible_loop_var": "item", "changed": false, "gid": 0, "group": "root", "item": "/var/hpc/azure", "mode": "0755", "owner": "root", "path": "/var/hpc/azure", "secontext": "unconfined_u:object_r:var_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.hpc : Check if already installed] ************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:767 Friday 30 January 2026 16:06:32 -0500 (0:00:01.725) 0:03:06.787 ******** ok: [managed-node1] => { "changed": false, "stat": { "atime": 1769806936.503352, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1769806938.6603591, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 188744529, "isblk": false, "ischr": false, "isdir": true, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/directory", "mode": "0755", "mtime": 1769806938.6603591, "nlink": 2, "path": "/opt/hpc/azure/topology", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 112, "uid": 0, "version": "1918538425", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.hpc : Install Topology Definitions] ************ task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:775 Friday 30 January 2026 16:06:32 -0500 (0:00:00.361) 0:03:07.149 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "not __hpc_sku_topology_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Install Graph Files] ********************* task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:783 Friday 30 January 2026 16:06:32 -0500 (0:00:00.020) 0:03:07.169 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "not __hpc_sku_topology_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Install setup script] ******************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:791 Friday 30 January 2026 16:06:32 -0500 (0:00:00.022) 0:03:07.192 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "not __hpc_sku_topology_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Install removal script] ****************** task path: 
/tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:799 Friday 30 January 2026 16:06:32 -0500 (0:00:00.021) 0:03:07.214 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "not __hpc_sku_topology_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Install systemd service file] ************ task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:807 Friday 30 January 2026 16:06:32 -0500 (0:00:00.020) 0:03:07.234 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "not __hpc_sku_topology_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Enable systemd service file] ************* task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:815 Friday 30 January 2026 16:06:32 -0500 (0:00:00.021) 0:03:07.256 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "not __hpc_sku_topology_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Install tests] *************************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:820 Friday 30 January 2026 16:06:32 -0500 (0:00:00.021) 0:03:07.277 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "not __hpc_sku_topology_stat.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Remove build dependencies] *************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:828 Friday 30 January 2026 16:06:32 -0500 (0:00:00.020) 0:03:07.297 ******** ok: [managed-node1] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.hpc : Check if Moneo is already installed] ***** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:842 Friday 30 January 2026 16:06:34 -0500 (0:00:01.206) 0:03:08.504 ******** ok: [managed-node1] => { "changed": false, "stat": { "atime": 1769806984.0635145, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 40, "charset": "us-ascii", "checksum": "23ab7091a7d20e7b37eadaeb08459e6ed8cd7955", "ctime": 1769806951.0344017, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 591397011, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/x-script.python", "mode": "0664", "mtime": 1704916257.0, "nlink": 1, "path": "/opt/hpc/azure/tools/Moneo/moneo.py", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 19369, "uid": 0, "version": "506716735", "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.hpc : Ensure Moneo install directory exists] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:850 Friday 30 January 2026 16:06:34 -0500 (0:00:00.374) 0:03:08.878 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "not __hpc_moneo_installed.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : 
Download Moneo] ************************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:858 Friday 30 January 2026 16:06:34 -0500 (0:00:00.021) 0:03:08.900 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "not __hpc_moneo_installed.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Copy Moneo files to install directory] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:863 Friday 30 January 2026 16:06:34 -0500 (0:00:00.022) 0:03:08.923 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "not __hpc_moneo_installed.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Configure Moneo service] ***************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:872 Friday 30 January 2026 16:06:34 -0500 (0:00:00.021) 0:03:08.944 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "not __hpc_moneo_installed.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Remove extracted temp directory] ********* task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:877 Friday 30 January 2026 16:06:34 -0500 (0:00:00.021) 0:03:08.965 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "not __hpc_moneo_installed.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.hpc : Add Moneo alias to /etc/bashrc] ********** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:883 Friday 30 January 2026 16:06:34 -0500 (0:00:00.023) 0:03:08.988 ******** ok: [managed-node1] => { "backup": "", "changed": false } TASK [fedora.linux_system_roles.hpc : Clean dnf cache] ************************* task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:890 Friday 30 January 2026 16:06:34 -0500 (0:00:00.369) 0:03:09.358 ******** ok: [managed-node1] => { "changed": false, "cmd": [ "dnf", "clean", "all" ], "delta": "0:00:00.224380", "end": "2026-01-30 16:06:35.409426", "rc": 0, "start": "2026-01-30 16:06:35.185046" } STDOUT: 59 files removed TASK [Assert current LV size of {{ item }}] ************************************ task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/tests/hpc/tests_skip_toolkit.yml:92 Friday 30 January 2026 16:06:35 -0500 (0:00:00.584) 0:03:09.942 ******** ok: [managed-node1] => (item=rootlv) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "lvs", "--noheadings", "--units", "g", "--nosuffix", "-o", "lv_size", "/dev/mapper/rootvg-rootlv" ], "delta": "0:00:00.031570", "end": "2026-01-30 16:06:35.811960", "failed_when_result": false, "item": "rootlv", "rc": 0, "start": "2026-01-30 16:06:35.780390" } STDOUT: 2.00 ok: [managed-node1] => (item=usrlv) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "lvs", "--noheadings", "--units", "g", "--nosuffix", "-o", "lv_size", "/dev/mapper/rootvg-usrlv" ], "delta": "0:00:00.033678", "end": "2026-01-30 16:06:36.200404", "failed_when_result": false, "item": "usrlv", "rc": 0, "start": "2026-01-30 16:06:36.166726" } STDOUT: 2.00 TASK [Flush handlers] ********************************************************** task path: 
/tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/tests/hpc/tests_skip_toolkit.yml:103 Friday 30 January 2026 16:06:36 -0500 (0:00:00.784) 0:03:10.727 ******** NOTIFIED HANDLER fedora.linux_system_roles.hpc : Restart waagent for managed-node1 META: triggered running handlers for managed-node1 RUNNING HANDLER [fedora.linux_system_roles.hpc : Restart waagent] ************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/handlers/main.yml:6 Friday 30 January 2026 16:06:36 -0500 (0:00:00.006) 0:03:10.734 ******** changed: [managed-node1] => { "changed": true, "name": "waagent", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "basic.target azure.slice systemd-journald.socket sysinit.target network-online.target", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Azure Linux Agent", "DevicePolicy": "auto", "DynamicUser": "no", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/python3 ; argv[]=/usr/bin/python3 -u /usr/sbin/waagent -daemon ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/python3 ; argv[]=/usr/bin/python3 -u /usr/sbin/waagent -daemon ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/waagent.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": 
"18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "waagent.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13684", "LimitNPROCSoft": "13684", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13684", "LimitSIGPENDINGSoft": "13684", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "18446744073709551615", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "waagent.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "azure.slice sysinit.target", "Restart": "always", "RestartKillSignal": "15", "RestartUSec": "5s", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "azure.slice", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", 
"StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "21894", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "simple", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "disabled", "UtmpMode": "init", "Wants": "network-online.target sshd-keygen.service sshd.service", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [Print 90-hpc-limits.conf file] ******************************************* task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/tests/hpc/tests_skip_toolkit.yml:107 Friday 30 January 2026 16:06:36 -0500 (0:00:00.516) 0:03:11.251 ******** ok: [managed-node1] => { "changed": false, "cmd": [ "cat", "/etc/security/limits.d/90-hpc-limits.conf" ], "delta": "0:00:00.003623", "end": "2026-01-30 16:06:37.081997", "rc": 0, "start": "2026-01-30 16:06:37.078374" } STDOUT: # # Ansible managed # # system_role:hpc * hard memlock unlimited * soft memlock unlimited * hard nofile 65535 * soft nofile 65535 * hard stack unlimited * soft stack unlimited TASK [Verify limits in 90-hpc-limits.conf] ************************************* task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/tests/hpc/tests_skip_toolkit.yml:112 Friday 30 January 2026 16:06:37 -0500 (0:00:00.353) 0:03:11.605 ******** ok: [managed-node1] => { "changed": false } MSG: All assertions passed TASK [Verify sysctl settings set from templates] ******************************* task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/tests/hpc/tests_skip_toolkit.yml:134 Friday 30 January 2026 16:06:37 -0500 (0:00:00.026) 0:03:11.631 ******** failed: [managed-node1] (item={'setting': 'vm.zone_reclaim_mode', 'value': 1}) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "sysctl", "-n", "vm.zone_reclaim_mode" ], "delta": "0:00:00.005268", "end": "2026-01-30 16:06:37.462312", "failed_when_result": true, "item": { "setting": "vm.zone_reclaim_mode", "value": 1 }, "rc": 0, "start": "2026-01-30 16:06:37.457044" } STDOUT: 0 failed: [managed-node1] (item={'setting': 'net.ipv4.neigh.default.gc_thresh1', 'value': 4096}) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "sysctl", "-n", "net.ipv4.neigh.default.gc_thresh1" ], "delta": "0:00:00.003883", "end": "2026-01-30 16:06:37.827856", "failed_when_result": true, "item": { "setting": "net.ipv4.neigh.default.gc_thresh1", "value": 4096 }, "rc": 0, "start": "2026-01-30 16:06:37.823973" } STDOUT: 128 failed: [managed-node1] (item={'setting': 'net.ipv4.neigh.default.gc_thresh2', 'value': 8192}) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "sysctl", "-n", "net.ipv4.neigh.default.gc_thresh2" ], "delta": "0:00:00.003241", "end": "2026-01-30 16:06:38.164379", "failed_when_result": true, "item": { "setting": "net.ipv4.neigh.default.gc_thresh2", "value": 8192 }, "rc": 0, "start": 
"2026-01-30 16:06:38.161138" } STDOUT: 512 failed: [managed-node1] (item={'setting': 'net.ipv4.neigh.default.gc_thresh3', 'value': 16384}) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "sysctl", "-n", "net.ipv4.neigh.default.gc_thresh3" ], "delta": "0:00:00.003265", "end": "2026-01-30 16:06:38.501350", "failed_when_result": true, "item": { "setting": "net.ipv4.neigh.default.gc_thresh3", "value": 16384 }, "rc": 0, "start": "2026-01-30 16:06:38.498085" } STDOUT: 1024 failed: [managed-node1] (item={'setting': 'sunrpc.tcp_max_slot_table_entries', 'value': 128}) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "sysctl", "-n", "sunrpc.tcp_max_slot_table_entries" ], "delta": "0:00:00.003241", "end": "2026-01-30 16:06:38.843881", "failed_when_result": true, "item": { "setting": "sunrpc.tcp_max_slot_table_entries", "value": 128 }, "rc": 0, "start": "2026-01-30 16:06:38.840640" } STDOUT: 65536 TASK [Remove both of the LVM logical volumes in 'foo' created above] *********** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/tests/hpc/tests_skip_toolkit.yml:152 Friday 30 January 2026 16:06:38 -0500 (0:00:01.740) 0:03:13.372 ******** included: fedora.linux_system_roles.storage for managed-node1 TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:2 Friday 30 January 2026 16:06:39 -0500 (0:00:00.121) 0:03:13.494 ******** included: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml for managed-node1 TASK [fedora.linux_system_roles.storage : Ensure ansible_facts used by role] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:2 Friday 30 January 2026 16:06:39 -0500 (0:00:00.029) 0:03:13.523 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "__storage_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set platform/version specific variables] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:7 Friday 30 January 2026 16:06:39 -0500 (0:00:00.045) 0:03:13.569 ******** skipping: [managed-node1] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node1] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node1] => (item=CentOS_9.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", "libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs", "stratisd", "stratis-cli", "{{ 'libblockdev-s390' if ansible_facts['architecture'] == 's390x' else 'libblockdev' }}" ] }, "ansible_included_var_files": [ "/tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_9.yml" } ok: [managed-node1] => (item=CentOS_9.yml) => { "ansible_facts": { "blivet_package_list": [ "python3-blivet", "libblockdev-crypto", "libblockdev-dm", "libblockdev-lvm", 
"libblockdev-mdraid", "libblockdev-swap", "vdo", "kmod-kvdo", "xfsprogs", "stratisd", "stratis-cli", "{{ 'libblockdev-s390' if ansible_facts['architecture'] == 's390x' else 'libblockdev' }}" ] }, "ansible_included_var_files": [ "/tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/vars/CentOS_9.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_9.yml" } TASK [fedora.linux_system_roles.storage : Check if system is ostree] *********** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:25 Friday 30 January 2026 16:06:39 -0500 (0:00:00.062) 0:03:13.632 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "not __storage_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Set flag to indicate system is ostree] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/set_vars.yml:30 Friday 30 January 2026 16:06:39 -0500 (0:00:00.024) 0:03:13.657 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "not __storage_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Define an empty list of pools to be used in testing] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:5 Friday 30 January 2026 16:06:39 -0500 (0:00:00.022) 0:03:13.680 ******** ok: [managed-node1] => { "ansible_facts": { "_storage_pools_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Define an empty list of volumes to be used in testing] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:9 Friday 30 January 2026 16:06:39 -0500 (0:00:00.020) 0:03:13.700 ******** ok: [managed-node1] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Include the appropriate provider tasks] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main.yml:13 Friday 30 January 2026 16:06:39 -0500 (0:00:00.021) 0:03:13.722 ******** redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount included: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml for managed-node1 TASK [fedora.linux_system_roles.storage : Make sure blivet is available] ******* task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 Friday 30 January 2026 16:06:39 -0500 (0:00:00.046) 0:03:13.768 ******** ok: [managed-node1] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.storage : Show storage_pools] ****************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:9 Friday 30 January 2026 16:06:58 -0500 (0:00:18.798) 0:03:32.567 ******** ok: [managed-node1] => { "storage_pools | d([])": [ { "disks": [ "sda", "sdb" ], "grow_to_fill": true, "name": "rootvg", "state": "absent", "volumes": [ { "mount_point": "/hpc-test1", "name": "rootlv", "size": "2G" }, { "mount_point": "/hpc-test2", "name": "usrlv", "size": "1G" } ] } ] } TASK [fedora.linux_system_roles.storage : Show storage_volumes] **************** task path: 
/tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:14 Friday 30 January 2026 16:06:58 -0500 (0:00:00.045) 0:03:32.612 ******** ok: [managed-node1] => { "storage_volumes | d([])": [] } TASK [fedora.linux_system_roles.storage : Get required packages] *************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 Friday 30 January 2026 16:06:58 -0500 (0:00:00.045) 0:03:32.658 ******** ok: [managed-node1] => { "actions": [], "changed": false, "crypts": [], "leaves": [], "mounts": [], "packages": [], "pools": [], "volumes": [] } TASK [fedora.linux_system_roles.storage : Enable copr repositories if needed] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:32 Friday 30 January 2026 16:06:59 -0500 (0:00:01.285) 0:03:33.944 ******** included: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml for managed-node1 TASK [fedora.linux_system_roles.storage : Check if the COPR support packages should be installed] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:2 Friday 30 January 2026 16:06:59 -0500 (0:00:00.037) 0:03:33.981 ******** skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Make sure COPR support packages are present] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:13 Friday 30 January 2026 16:06:59 -0500 (0:00:00.037) 0:03:34.018 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "install_copr | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Enable COPRs] ************************ task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/enable_coprs.yml:19 Friday 30 January 2026 16:06:59 -0500 (0:00:00.038) 0:03:34.057 ******** skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Make sure required packages are installed] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:38 Friday 30 January 2026 16:06:59 -0500 (0:00:00.041) 0:03:34.099 ******** ok: [managed-node1] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.storage : Get service facts] ******************* task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:52 Friday 30 January 2026 16:07:00 -0500 (0:00:01.219) 0:03:35.319 ******** ok: [managed-node1] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "apt-daily.service": { "name": "apt-daily.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": 
"running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "azure-ephemeral-disk-setup.service": { "name": "azure-ephemeral-disk-setup.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "blivet.service": { "name": "blivet.service", "source": "systemd", "state": "inactive", "status": "static" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": 
"dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fcoe.service": { "name": "fcoe.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "iscsi-shutdown.service": { 
"name": "iscsi-shutdown.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsi.service": { "name": "iscsi.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iscsid.service": { "name": "iscsid.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-activation-early.service": { "name": "lvm2-activation-early.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "mdadm-grow-continue@.service": { "name": "mdadm-grow-continue@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdadm-last-resort@.service": { "name": "mdadm-last-resort@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdcheck_continue.service": { "name": "mdcheck_continue.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdcheck_start.service": { "name": "mdcheck_start.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmon@.service": { "name": "mdmon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "mdmonitor-oneshot.service": { "name": "mdmonitor-oneshot.service", "source": "systemd", "state": "inactive", "status": "static" }, "mdmonitor.service": { "name": "mdmonitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "multipathd.service": { "name": "multipathd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ndctl-monitor.service": { "name": "ndctl-monitor.service", "source": "systemd", 
"state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "raid-check.service": { "name": "raid-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "rbdmap.service": { "name": "rbdmap.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rdma-load-modules@.service": { "name": "rdma-load-modules@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rdma-ndd.service": { "name": "rdma-ndd.service", "source": "systemd", "state": "inactive", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": 
"stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sku_customisation.service": { "name": "sku_customisation.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "stratis-fstab-setup@.service": { "name": "stratis-fstab-setup@.service", "source": "systemd", "state": "unknown", "status": "static" }, "stratisd-min-postinitrd.service": { "name": "stratisd-min-postinitrd.service", "source": "systemd", "state": "inactive", "status": "static" }, "stratisd.service": { "name": "stratisd.service", 
"source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": 
"stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { 
"name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles.service": { "name": "systemd-tmpfiles.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "target.service": { "name": "target.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "targetclid.service": { "name": "targetclid.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "waagent.service": { "name": "waagent.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ypbind.service": { "name": 
"ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "yppasswdd.service": { "name": "yppasswdd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypserv.service": { "name": "ypserv.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypxfrd.service": { "name": "ypxfrd.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.storage : Set storage_cryptsetup_services] ***** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:58 Friday 30 January 2026 16:07:02 -0500 (0:00:01.980) 0:03:37.300 ******** ok: [managed-node1] => { "ansible_facts": { "storage_cryptsetup_services": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Mask the systemd cryptsetup services] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:64 Friday 30 January 2026 16:07:02 -0500 (0:00:00.058) 0:03:37.358 ******** skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 Friday 30 January 2026 16:07:02 -0500 (0:00:00.019) 0:03:37.378 ******** changed: [managed-node1] => { "actions": [ { "action": "destroy format", "device": "/dev/mapper/rootvg-usrlv", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/rootvg-usrlv", "fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/rootvg-rootlv", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/rootvg-rootlv", "fs_type": null }, { "action": "destroy device", "device": "/dev/rootvg", "fs_type": null }, { "action": "destroy format", "device": "/dev/sda", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/sdb", "fs_type": "lvmpv" } ], "changed": true, "crypts": [], "leaves": [ "/dev/sda", "/dev/sdb", "/dev/xvda1" ], "mounts": [ { "fstype": "xfs", "path": "/hpc-test2", "src": "/dev/mapper/rootvg-usrlv", "state": "absent" }, { "fstype": "xfs", "path": "/hpc-test1", "src": "/dev/mapper/rootvg-rootlv", "state": "absent" } ], "packages": [ "xfsprogs" ], "pools": [ { "disks": [ "sda", "sdb" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": true, "name": "rootvg", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/rootvg-rootlv", "_mount_id": "/dev/mapper/rootvg-rootlv", "_raw_device": "/dev/mapper/rootvg-rootlv", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, 
"mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/hpc-test1", "mount_user": null, "name": "rootlv", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "2G", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/rootvg-usrlv", "_mount_id": "/dev/mapper/rootvg-usrlv", "_raw_device": "/dev/mapper/rootvg-usrlv", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/hpc-test2", "mount_user": null, "name": "usrlv", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "1G", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } TASK [fedora.linux_system_roles.storage : Workaround for udev issue on some platforms] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:85 Friday 30 January 2026 16:07:05 -0500 (0:00:02.408) 0:03:39.786 ******** skipping: [managed-node1] => { "changed": false, "false_condition": "storage_udevadm_trigger | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.storage : Check if /etc/fstab is present] ****** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:92 Friday 30 January 2026 16:07:05 -0500 (0:00:00.039) 0:03:39.826 ******** ok: [managed-node1] => { "changed": false, "stat": { "atime": 1769807119.8532386, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "4d88452b6790937bcc1e249bb6e9d737c35ff46b", "ctime": 1769807119.8502386, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 494929982, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1769807119.8502386, "nlink": 1, "path": "/etc/fstab", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1515, "uid": 0, "version": "872395520", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Add fingerprint to /etc/fstab if present] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:97 Friday 30 January 2026 16:07:05 -0500 (0:00:00.365) 0:03:40.191 ******** ok: [managed-node1] => { "backup": "", "changed": false } TASK [fedora.linux_system_roles.storage : Unmask the systemd cryptsetup services] *** task path: 
/tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:115 Friday 30 January 2026 16:07:06 -0500 (0:00:00.369) 0:03:40.560 ******** skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Show blivet_output] ****************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:121 Friday 30 January 2026 16:07:06 -0500 (0:00:00.018) 0:03:40.578 ******** ok: [managed-node1] => { "blivet_output": { "actions": [ { "action": "destroy format", "device": "/dev/mapper/rootvg-usrlv", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/rootvg-usrlv", "fs_type": null }, { "action": "destroy format", "device": "/dev/mapper/rootvg-rootlv", "fs_type": "xfs" }, { "action": "destroy device", "device": "/dev/mapper/rootvg-rootlv", "fs_type": null }, { "action": "destroy device", "device": "/dev/rootvg", "fs_type": null }, { "action": "destroy format", "device": "/dev/sda", "fs_type": "lvmpv" }, { "action": "destroy format", "device": "/dev/sdb", "fs_type": "lvmpv" } ], "changed": true, "crypts": [], "failed": false, "leaves": [ "/dev/sda", "/dev/sdb", "/dev/xvda1" ], "mounts": [ { "fstype": "xfs", "path": "/hpc-test2", "src": "/dev/mapper/rootvg-usrlv", "state": "absent" }, { "fstype": "xfs", "path": "/hpc-test1", "src": "/dev/mapper/rootvg-rootlv", "state": "absent" } ], "packages": [ "xfsprogs" ], "pools": [ { "disks": [ "sda", "sdb" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": true, "name": "rootvg", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/rootvg-rootlv", "_mount_id": "/dev/mapper/rootvg-rootlv", "_raw_device": "/dev/mapper/rootvg-rootlv", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/hpc-test1", "mount_user": null, "name": "rootlv", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "2G", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/rootvg-usrlv", "_mount_id": "/dev/mapper/rootvg-usrlv", "_raw_device": "/dev/mapper/rootvg-usrlv", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, 
"fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/hpc-test2", "mount_user": null, "name": "usrlv", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "1G", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ], "volumes": [] } } TASK [fedora.linux_system_roles.storage : Set the list of pools for test verification] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:130 Friday 30 January 2026 16:07:06 -0500 (0:00:00.059) 0:03:40.637 ******** ok: [managed-node1] => { "ansible_facts": { "_storage_pools_list": [ { "disks": [ "sda", "sdb" ], "encryption": false, "encryption_cipher": null, "encryption_clevis_pin": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "encryption_tang_thumbprint": null, "encryption_tang_url": null, "grow_to_fill": true, "name": "rootvg", "raid_chunk_size": null, "raid_device_count": null, "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "shared": false, "state": "absent", "type": "lvm", "volumes": [ { "_device": "/dev/mapper/rootvg-rootlv", "_mount_id": "/dev/mapper/rootvg-rootlv", "_raw_device": "/dev/mapper/rootvg-rootlv", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/hpc-test1", "mount_user": null, "name": "rootlv", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "2G", "state": "present", "thin": false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null }, { "_device": "/dev/mapper/rootvg-usrlv", "_mount_id": "/dev/mapper/rootvg-usrlv", "_raw_device": "/dev/mapper/rootvg-usrlv", "cache_devices": [], "cache_mode": null, "cache_size": 0, "cached": false, "compression": null, "deduplication": null, "disks": [ "sda", "sdb" ], "encryption": false, "encryption_cipher": null, "encryption_key": null, "encryption_key_size": null, "encryption_luks_version": null, "encryption_password": null, "fs_create_options": "", "fs_label": "", "fs_overwrite_existing": true, "fs_type": "xfs", "mount_check": 0, "mount_device_identifier": "uuid", "mount_group": null, "mount_mode": null, "mount_options": "defaults", "mount_passno": 0, "mount_point": "/hpc-test2", "mount_user": null, "name": "usrlv", "part_type": null, "raid_chunk_size": null, "raid_device_count": null, "raid_disks": [], "raid_level": null, "raid_metadata_version": null, "raid_spare_count": null, "raid_stripe_size": null, "size": "1G", "state": "present", "thin": 
false, "thin_pool_name": null, "thin_pool_size": null, "type": "lvm", "vdo_pool_size": null } ] } ] }, "changed": false } TASK [fedora.linux_system_roles.storage : Set the list of volumes for test verification] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:134 Friday 30 January 2026 16:07:06 -0500 (0:00:00.027) 0:03:40.665 ******** ok: [managed-node1] => { "ansible_facts": { "_storage_volumes_list": [] }, "changed": false } TASK [fedora.linux_system_roles.storage : Remove obsolete mounts] ************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:150 Friday 30 January 2026 16:07:06 -0500 (0:00:00.022) 0:03:40.687 ******** redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount changed: [managed-node1] => (item={'src': '/dev/mapper/rootvg-usrlv', 'path': '/hpc-test2', 'state': 'absent', 'fstype': 'xfs'}) => { "ansible_loop_var": "mount_info", "backup_file": "", "boot": "yes", "changed": true, "dump": "0", "fstab": "/etc/fstab", "fstype": "xfs", "mount_info": { "fstype": "xfs", "path": "/hpc-test2", "src": "/dev/mapper/rootvg-usrlv", "state": "absent" }, "name": "/hpc-test2", "opts": "defaults", "passno": "0", "src": "/dev/mapper/rootvg-usrlv" } redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount redirecting (type: modules) ansible.builtin.mount to ansible.posix.mount changed: [managed-node1] => (item={'src': '/dev/mapper/rootvg-rootlv', 'path': '/hpc-test1', 'state': 'absent', 'fstype': 'xfs'}) => { "ansible_loop_var": "mount_info", "backup_file": "", "boot": "yes", "changed": true, "dump": "0", "fstab": "/etc/fstab", "fstype": "xfs", "mount_info": { "fstype": "xfs", "path": "/hpc-test1", "src": "/dev/mapper/rootvg-rootlv", "state": "absent" }, "name": "/hpc-test1", "opts": "defaults", "passno": "0", "src": "/dev/mapper/rootvg-rootlv" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:161 Friday 30 January 2026 16:07:06 -0500 (0:00:00.741) 0:03:41.429 ******** ok: [managed-node1] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Set up new/current mounts] *********** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:166 Friday 30 January 2026 16:07:07 -0500 (0:00:00.708) 0:03:42.137 ******** skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Manage mount ownership/permissions] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:177 Friday 30 January 2026 16:07:07 -0500 (0:00:00.043) 0:03:42.181 ******** skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Tell systemd to refresh its view of /etc/fstab] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:189 Friday 30 January 2026 16:07:07 -0500 (0:00:00.046) 0:03:42.228 ******** ok: [managed-node1] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.storage : Retrieve facts for the /etc/crypttab file] *** 
task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:197 Friday 30 January 2026 16:07:08 -0500 (0:00:00.774) 0:03:43.003 ******** ok: [managed-node1] => { "changed": false, "stat": { "atime": 1769806764.355625, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1767624696.987, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 4194436, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0600", "mtime": 1767624397.527, "nlink": 1, "path": "/etc/crypttab", "pw_name": "root", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 0, "uid": 0, "version": "275126202", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.storage : Manage /etc/crypttab to account for changes we just made] *** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:202 Friday 30 January 2026 16:07:08 -0500 (0:00:00.427) 0:03:43.430 ******** skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.storage : Update facts] ************************ task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:224 Friday 30 January 2026 16:07:08 -0500 (0:00:00.020) 0:03:43.450 ******** ok: [managed-node1] TASK [Clean up after the role invocation] ************************************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/tests/hpc/tests_skip_toolkit.yml:169 Friday 30 January 2026 16:07:10 -0500 (0:00:01.214) 0:03:44.665 ******** included: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/tests/hpc/tasks/cleanup.yml for managed-node1 TASK [Check if versionlock entries exist] ************************************** task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/tests/hpc/tasks/cleanup.yml:3 Friday 30 January 2026 16:07:10 -0500 (0:00:00.058) 0:03:44.724 ******** ok: [managed-node1] => { "changed": false, "stat": { "atime": 1769807183.3382626, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "26a9429428736f4db1056618b4c94242a1ffe2ef", "ctime": 1769807182.1802623, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 508293, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1769807182.1802623, "nlink": 1, "path": "/etc/dnf/plugins/versionlock.list", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1005, "uid": 0, "version": "711100237", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Clear dnf versionlock entries] ******************************************* task path: /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/tests/hpc/tasks/cleanup.yml:8 Friday 30 January 2026 16:07:10 -0500 (0:00:00.365) 0:03:45.089 ******** changed: [managed-node1] 
=> { "changed": true, "cmd": [ "dnf", "versionlock", "clear" ], "delta": "0:00:00.809299", "end": "2026-01-30 16:07:11.730354", "rc": 0, "start": "2026-01-30 16:07:10.921055" } STDOUT: Last metadata expiration check: 0:00:02 ago on Fri 30 Jan 2026 04:07:09 PM EST. PLAY RECAP ********************************************************************* managed-node1 : ok=127 changed=14 unreachable=0 failed=1 skipped=107 rescued=0 ignored=0 SYSTEM ROLES ERRORS BEGIN v1 [ { "ansible_version": "2.17.14", "delta": "0:00:00.005268", "end_time": "2026-01-30 16:06:37.462312", "host": "managed-node1", "loop_item": { "setting": "vm.zone_reclaim_mode", "value": 1 }, "loop_label": "", "loop_var": "item", "message": "", "rc": 0, "start_time": "2026-01-30 16:06:37.457044", "stdout": "0", "task_name": "Verify sysctl settings set from templates", "task_path": "/tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/tests/hpc/tests_skip_toolkit.yml:134" }, { "ansible_version": "2.17.14", "delta": "0:00:00.003883", "end_time": "2026-01-30 16:06:37.827856", "host": "managed-node1", "loop_item": { "setting": "net.ipv4.neigh.default.gc_thresh1", "value": 4096 }, "loop_label": "", "loop_var": "item", "message": "", "rc": 0, "start_time": "2026-01-30 16:06:37.823973", "stdout": "128", "task_name": "Verify sysctl settings set from templates", "task_path": "/tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/tests/hpc/tests_skip_toolkit.yml:134" }, { "ansible_version": "2.17.14", "delta": "0:00:00.003241", "end_time": "2026-01-30 16:06:38.164379", "host": "managed-node1", "loop_item": { "setting": "net.ipv4.neigh.default.gc_thresh2", "value": 8192 }, "loop_label": "", "loop_var": "item", "message": "", "rc": 0, "start_time": "2026-01-30 16:06:38.161138", "stdout": "512", "task_name": "Verify sysctl settings set from templates", "task_path": "/tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/tests/hpc/tests_skip_toolkit.yml:134" }, { "ansible_version": "2.17.14", "delta": "0:00:00.003265", "end_time": "2026-01-30 16:06:38.501350", "host": "managed-node1", "loop_item": { "setting": "net.ipv4.neigh.default.gc_thresh3", "value": 16384 }, "loop_label": "", "loop_var": "item", "message": "", "rc": 0, "start_time": "2026-01-30 16:06:38.498085", "stdout": "1024", "task_name": "Verify sysctl settings set from templates", "task_path": "/tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/tests/hpc/tests_skip_toolkit.yml:134" }, { "ansible_version": "2.17.14", "delta": "0:00:00.003241", "end_time": "2026-01-30 16:06:38.843881", "host": "managed-node1", "loop_item": { "setting": "sunrpc.tcp_max_slot_table_entries", "value": 128 }, "loop_label": "", "loop_var": "item", "message": "", "rc": 0, "start_time": "2026-01-30 16:06:38.840640", "stdout": "65536", "task_name": "Verify sysctl settings set from templates", "task_path": "/tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/tests/hpc/tests_skip_toolkit.yml:134" } ] SYSTEM ROLES ERRORS END v1 TASKS RECAP ******************************************************************** Friday 30 January 2026 16:07:11 -0500 (0:00:01.156) 0:03:46.246 ******** =============================================================================== fedora.linux_system_roles.storage : Make sure blivet is available ----- 100.35s /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 fedora.linux_system_roles.hpc : Install NVIDIA NCCL -------------------- 23.27s 
/tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:371 fedora.linux_system_roles.storage : Make sure blivet is available ------ 18.80s /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:2 fedora.linux_system_roles.hpc : Prevent installation of all kernel packages of a different version --- 7.43s /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:266 fedora.linux_system_roles.hpc : Install RDMA packages ------------------- 4.16s /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:407 fedora.linux_system_roles.hpc : Install kernel-devel and kernel-headers packages for all kernels --- 3.51s /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:241 fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 3.14s /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 fedora.linux_system_roles.hpc : Prevent update of NVIDIA NCCL packages --- 2.52s /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:380 fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 2.41s /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 fedora.linux_system_roles.storage : Get service facts ------------------- 2.07s /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:52 fedora.linux_system_roles.storage : Get service facts ------------------- 1.98s /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:52 fedora.linux_system_roles.storage : Get service facts ------------------- 1.89s /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:52 Verify sysctl settings set from templates ------------------------------- 1.74s /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/tests/hpc/tests_skip_toolkit.yml:134 fedora.linux_system_roles.hpc : Create Azure HPC resource directories --- 1.73s /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:750 fedora.linux_system_roles.storage : Manage the pools and volumes to match the specified state --- 1.63s /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:70 fedora.linux_system_roles.hpc : Deploy GPG keys for repositories -------- 1.60s /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:27 fedora.linux_system_roles.hpc : Install EPEL release package ------------ 1.39s /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/hpc/tasks/main.yml:37 fedora.linux_system_roles.storage : Get required packages --------------- 1.29s /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 fedora.linux_system_roles.storage : Get required packages --------------- 1.28s /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/roles/storage/tasks/main-blivet.yml:19 Ensure test packages ---------------------------------------------------- 1.27s /tmp/collections-GW7/ansible_collections/fedora/linux_system_roles/tests/hpc/tests_skip_toolkit.yml:32 Jan 30 16:03:26 managed-node1 python3.9[61618]: 
ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Jan 30 16:03:27 managed-node1 python3.9[61795]: ansible-ansible.legacy.dnf Invoked with name=['util-linux-core'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 30 16:03:28 managed-node1 python3.9[61945]: ansible-fedora.linux_system_roles.find_unused_disk Invoked with max_return=10 min_size=0 max_size=0 match_sector_size=False with_interface=None Jan 30 16:03:28 managed-node1 python3.9[62098]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 30 16:03:29 managed-node1 python3.9[62247]: ansible-ansible.legacy.dnf Invoked with name=['python3-blivet', 'libblockdev-crypto', 'libblockdev-dm', 'libblockdev-lvm', 'libblockdev-mdraid', 'libblockdev-swap', 'vdo', 'kmod-kvdo', 'xfsprogs', 'stratisd', 'stratis-cli', 'libblockdev'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 30 16:03:32 managed-node1 systemd[1]: Reloading. Jan 30 16:03:32 managed-node1 systemd-rc-local-generator[62333]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 30 16:03:32 managed-node1 systemd[1]: Configuration file /etc/systemd/system/sku_customisation.service is marked executable. Please remove executable permission bits. Proceeding anyway. Jan 30 16:03:32 managed-node1 systemd[1]: Listening on Device-mapper event daemon FIFOs. ░░ Subject: A start job for unit dm-event.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dm-event.socket has finished successfully. ░░ ░░ The job identifier is 2000. Jan 30 16:03:33 managed-node1 systemd[1]: Reloading. Jan 30 16:03:33 managed-node1 systemd-rc-local-generator[62364]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 30 16:03:33 managed-node1 systemd[1]: Configuration file /etc/systemd/system/sku_customisation.service is marked executable. Please remove executable permission bits. Proceeding anyway. Jan 30 16:03:33 managed-node1 systemd[1]: Starting Monitoring of LVM2 mirrors, snapshots etc. using dmeventd or progress polling... ░░ Subject: A start job for unit lvm2-monitor.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit lvm2-monitor.service has begun execution. ░░ ░░ The job identifier is 2004. Jan 30 16:03:33 managed-node1 systemd[1]: Finished Monitoring of LVM2 mirrors, snapshots etc.
using dmeventd or progress polling. ░░ Subject: A start job for unit lvm2-monitor.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit lvm2-monitor.service has finished successfully. ░░ ░░ The job identifier is 2004. Jan 30 16:03:33 managed-node1 systemd[1]: Reloading. Jan 30 16:03:33 managed-node1 systemd-rc-local-generator[62398]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 30 16:03:33 managed-node1 systemd[1]: Configuration file /etc/systemd/system/sku_customisation.service is marked executable. Please remove executable permission bits. Proceeding anyway. Jan 30 16:03:33 managed-node1 systemd[1]: Listening on LVM2 poll daemon socket. ░░ Subject: A start job for unit lvm2-lvmpolld.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit lvm2-lvmpolld.socket has finished successfully. ░░ ░░ The job identifier is 2010. Jan 30 16:03:33 managed-node1 dbus-broker-launch[591]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reload request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reloaded again. Jan 30 16:03:33 managed-node1 dbus-broker-launch[591]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reload request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reloaded again.
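For context, the ansible-ansible.legacy.dnf invocation recorded a little earlier in this journal excerpt installs python3-blivet and the libblockdev plugins; this is the package prerequisite step the recap lists as "fedora.linux_system_roles.storage : Make sure blivet is available". A minimal standalone sketch that would produce an equivalent module call is shown here; the play name, hosts value, and task layout are assumptions for illustration only, not the role's actual task file, while the package list is copied from the logged invocation:

- name: Sketch - install the storage role's blivet prerequisites (hypothetical play)
  hosts: managed-node1          # host name taken from this log; adjust as needed
  become: true
  tasks:
    - name: Make sure blivet is available
      ansible.builtin.dnf:
        # package list copied verbatim from the dnf invocation in the journal above
        name:
          - python3-blivet
          - libblockdev-crypto
          - libblockdev-dm
          - libblockdev-lvm
          - libblockdev-mdraid
          - libblockdev-swap
          - vdo
          - kmod-kvdo
          - xfsprogs
          - stratisd
          - stratis-cli
          - libblockdev
        state: present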
Jan 30 16:03:34 managed-node1 groupadd[62414]: group added to /etc/group: name=clevis, GID=996 Jan 30 16:03:34 managed-node1 groupadd[62414]: group added to /etc/gshadow: name=clevis Jan 30 16:03:34 managed-node1 groupadd[62414]: new group: name=clevis, GID=996 Jan 30 16:03:34 managed-node1 useradd[62421]: new user: name=clevis, UID=996, GID=996, home=/var/cache/clevis, shell=/usr/sbin/nologin, from=none Jan 30 16:03:34 managed-node1 usermod[62431]: add 'clevis' to group 'tss' Jan 30 16:03:34 managed-node1 usermod[62431]: add 'clevis' to shadow group 'tss' Jan 30 16:03:34 managed-node1 dbus-broker-launch[591]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reload request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reloaded again. Jan 30 16:03:34 managed-node1 dbus-broker-launch[591]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reload request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reloaded again. Jan 30 16:04:04 managed-node1 systemd[4030]: Created slice User Background Tasks Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 21. Jan 30 16:04:04 managed-node1 systemd[4030]: Starting Cleanup of User's Temporary Files and Directories... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 20. Jan 30 16:04:04 managed-node1 systemd[4030]: Finished Cleanup of User's Temporary Files and Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 20.
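The journal entries that follow record dracut regenerating the initramfs for kernel 5.14.0-654.el9.x86_64 (the log shows /usr/bin/dracut -f being executed). As a hedged sketch only, and not necessarily how the hpc or storage role triggers the rebuild, an equivalent step could be expressed as a simple command task; the play structure and task names are assumptions, while the kernel version and the dracut -f form are taken from the log below:

- name: Sketch - regenerate the initramfs with dracut (hypothetical play)
  hosts: managed-node1
  become: true
  vars:
    kernel_version: 5.14.0-654.el9.x86_64   # version observed in the dracut journal entries below
  tasks:
    - name: Rebuild initramfs for the target kernel
      ansible.builtin.command:
        cmd: dracut -f /boot/initramfs-{{ kernel_version }}.img {{ kernel_version }}
      changed_when: true   # dracut -f always rewrites the image, so report the task as changed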
Jan 30 16:04:16 managed-node1 dracut[62966]: dracut-057-102.git20250818.el9 Jan 30 16:04:17 managed-node1 dracut[62968]: Executing: /usr/bin/dracut -f /boot/initramfs-5.14.0-654.el9.x86_64.tmp 5.14.0-654.el9.x86_64 Jan 30 16:04:17 managed-node1 dracut[62968]: dracut module 'systemd-networkd' will not be installed, because command 'networkctl' could not be found! Jan 30 16:04:17 managed-node1 dracut[62968]: dracut module 'systemd-networkd' will not be installed, because command '/usr/lib/systemd/systemd-networkd' could not be found! Jan 30 16:04:17 managed-node1 dracut[62968]: dracut module 'systemd-networkd' will not be installed, because command '/usr/lib/systemd/systemd-networkd-wait-online' could not be found! Jan 30 16:04:17 managed-node1 dracut[62968]: dracut module 'systemd-resolved' will not be installed, because command 'resolvectl' could not be found! Jan 30 16:04:17 managed-node1 dracut[62968]: dracut module 'systemd-resolved' will not be installed, because command '/usr/lib/systemd/systemd-resolved' could not be found! Jan 30 16:04:17 managed-node1 dracut[62968]: dracut module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-timesyncd' could not be found! Jan 30 16:04:17 managed-node1 dracut[62968]: dracut module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-time-wait-sync' could not be found! Jan 30 16:04:17 managed-node1 dracut[62968]: dracut module 'busybox' will not be installed, because command 'busybox' could not be found! Jan 30 16:04:17 managed-node1 dracut[62968]: dracut module 'dbus-daemon' will not be installed, because command 'dbus-daemon' could not be found! Jan 30 16:04:17 managed-node1 dracut[62968]: dracut module 'connman' will not be installed, because command 'connmand' could not be found! Jan 30 16:04:17 managed-node1 dracut[62968]: dracut module 'connman' will not be installed, because command 'connmanctl' could not be found! Jan 30 16:04:17 managed-node1 dracut[62968]: dracut module 'connman' will not be installed, because command 'connmand-wait-online' could not be found! Jan 30 16:04:17 managed-node1 dracut[62968]: dracut module 'network-wicked' will not be installed, because command 'wicked' could not be found! Jan 30 16:04:17 managed-node1 dracut[62968]: 62bluetooth: Could not find any command of '/usr/lib/bluetooth/bluetoothd /usr/libexec/bluetooth/bluetoothd'! Jan 30 16:04:17 managed-node1 dracut[62968]: dracut module 'btrfs' will not be installed, because command 'btrfs' could not be found! Jan 30 16:04:17 managed-node1 dracut[62968]: dracut module 'dmraid' will not be installed, because command 'dmraid' could not be found! Jan 30 16:04:17 managed-node1 dracut[62968]: dracut module 'pcsc' will not be installed, because command 'pcscd' could not be found! Jan 30 16:04:17 managed-node1 dracut[62968]: dracut module 'cifs' will not be installed, because command 'mount.cifs' could not be found! Jan 30 16:04:17 managed-node1 dracut[62968]: dracut module 'iscsi' will not be installed, because command 'iscsi-iname' could not be found! Jan 30 16:04:17 managed-node1 dracut[62968]: dracut module 'iscsi' will not be installed, because command 'iscsiadm' could not be found! Jan 30 16:04:17 managed-node1 dracut[62968]: dracut module 'iscsi' will not be installed, because command 'iscsid' could not be found! Jan 30 16:04:17 managed-node1 dracut[62968]: dracut module 'nvmf' will not be installed, because command 'nvme' could not be found! 
Jan 30 16:04:17 managed-node1 dracut[62968]: dracut module 'biosdevname' will not be installed, because command 'biosdevname' could not be found! Jan 30 16:04:17 managed-node1 dracut[62968]: dracut module 'memstrack' will not be installed, because command 'memstrack' could not be found! Jan 30 16:04:17 managed-node1 dracut[62968]: memstrack is not available Jan 30 16:04:17 managed-node1 dracut[62968]: If you need to use rd.memdebug>=4, please install memstrack and procps-ng Jan 30 16:04:17 managed-node1 dracut[62968]: dracut module 'systemd-resolved' will not be installed, because command 'resolvectl' could not be found! Jan 30 16:04:17 managed-node1 dracut[62968]: dracut module 'systemd-resolved' will not be installed, because command '/usr/lib/systemd/systemd-resolved' could not be found! Jan 30 16:04:17 managed-node1 dracut[62968]: dracut module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-timesyncd' could not be found! Jan 30 16:04:17 managed-node1 dracut[62968]: dracut module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-time-wait-sync' could not be found! Jan 30 16:04:17 managed-node1 dracut[62968]: dracut module 'busybox' will not be installed, because command 'busybox' could not be found! Jan 30 16:04:17 managed-node1 dracut[62968]: dracut module 'dbus-daemon' will not be installed, because command 'dbus-daemon' could not be found! Jan 30 16:04:17 managed-node1 dracut[62968]: dracut module 'connman' will not be installed, because command 'connmand' could not be found! Jan 30 16:04:17 managed-node1 dracut[62968]: dracut module 'connman' will not be installed, because command 'connmanctl' could not be found! Jan 30 16:04:17 managed-node1 dracut[62968]: dracut module 'connman' will not be installed, because command 'connmand-wait-online' could not be found! Jan 30 16:04:17 managed-node1 dracut[62968]: dracut module 'network-wicked' will not be installed, because command 'wicked' could not be found! Jan 30 16:04:17 managed-node1 dracut[62968]: 62bluetooth: Could not find any command of '/usr/lib/bluetooth/bluetoothd /usr/libexec/bluetooth/bluetoothd'! Jan 30 16:04:17 managed-node1 dracut[62968]: dracut module 'btrfs' will not be installed, because command 'btrfs' could not be found! Jan 30 16:04:17 managed-node1 dracut[62968]: dracut module 'dmraid' will not be installed, because command 'dmraid' could not be found! Jan 30 16:04:17 managed-node1 dracut[62968]: dracut module 'pcsc' will not be installed, because command 'pcscd' could not be found! Jan 30 16:04:17 managed-node1 dracut[62968]: dracut module 'cifs' will not be installed, because command 'mount.cifs' could not be found! Jan 30 16:04:17 managed-node1 dracut[62968]: dracut module 'iscsi' will not be installed, because command 'iscsi-iname' could not be found! Jan 30 16:04:17 managed-node1 dracut[62968]: dracut module 'iscsi' will not be installed, because command 'iscsiadm' could not be found! Jan 30 16:04:17 managed-node1 dracut[62968]: dracut module 'iscsi' will not be installed, because command 'iscsid' could not be found! Jan 30 16:04:17 managed-node1 dracut[62968]: dracut module 'nvmf' will not be installed, because command 'nvme' could not be found! Jan 30 16:04:17 managed-node1 dracut[62968]: dracut module 'memstrack' will not be installed, because command 'memstrack' could not be found! 
Jan 30 16:04:17 managed-node1 dracut[62968]: memstrack is not available Jan 30 16:04:17 managed-node1 dracut[62968]: If you need to use rd.memdebug>=4, please install memstrack and procps-ng Jan 30 16:04:17 managed-node1 dracut[62968]: *** Including module: bash *** Jan 30 16:04:17 managed-node1 dracut[62968]: *** Including module: systemd *** Jan 30 16:04:17 managed-node1 dracut[62968]: *** Including module: fips *** Jan 30 16:04:17 managed-node1 dracut[62968]: *** Including module: systemd-initrd *** Jan 30 16:04:17 managed-node1 dracut[62968]: *** Including module: systemd-sysusers *** Jan 30 16:04:17 managed-node1 dracut[62968]: *** Including module: nss-softokn *** Jan 30 16:04:17 managed-node1 dracut[62968]: *** Including module: dbus-broker *** Jan 30 16:04:18 managed-node1 dracut[62968]: *** Including module: rngd *** Jan 30 16:04:18 managed-node1 dracut[62968]: *** Including module: dbus *** Jan 30 16:04:18 managed-node1 dracut[62968]: *** Including module: i18n *** Jan 30 16:04:18 managed-node1 dracut[62968]: *** Including module: network-manager *** Jan 30 16:04:18 managed-node1 dracut[62968]: *** Including module: network *** Jan 30 16:04:18 managed-node1 dracut[62968]: *** Including module: ifcfg *** Jan 30 16:04:18 managed-node1 dracut[62968]: *** Including module: prefixdevname *** Jan 30 16:04:18 managed-node1 dracut[62968]: *** Including module: kernel-modules *** Jan 30 16:04:18 managed-node1 dracut[62968]: *** Including module: kernel-modules-extra *** Jan 30 16:04:18 managed-node1 dracut[62968]: kernel-modules-extra: configuration source "/run/depmod.d" does not exist Jan 30 16:04:18 managed-node1 dracut[62968]: kernel-modules-extra: configuration source "/lib/depmod.d" does not exist Jan 30 16:04:18 managed-node1 dracut[62968]: kernel-modules-extra: parsing configuration file "/etc/depmod.d/dist.conf" Jan 30 16:04:18 managed-node1 dracut[62968]: kernel-modules-extra: /etc/depmod.d/dist.conf: added "updates extra built-in weak-updates" to the list of search directories Jan 30 16:04:18 managed-node1 dracut[62968]: kernel-modules-extra: parsing configuration file "/etc/depmod.d/kvdo.conf" Jan 30 16:04:18 managed-node1 dracut[62968]: kernel-modules-extra: /etc/depmod.d/kvdo.conf: added override "weak-updates/kmod-kvdo/vdo/kvdo" Jan 30 16:04:18 managed-node1 dracut[62968]: *** Including module: kernel-network-modules *** Jan 30 16:04:18 managed-node1 dracut[62968]: *** Including module: resume *** Jan 30 16:04:18 managed-node1 dracut[62968]: *** Including module: rootfs-block *** Jan 30 16:04:18 managed-node1 dracut[62968]: *** Including module: terminfo *** Jan 30 16:04:18 managed-node1 dracut[62968]: *** Including module: udev-rules *** Jan 30 16:04:19 managed-node1 dracut[62968]: Skipping udev rule: 91-permissions.rules Jan 30 16:04:19 managed-node1 dracut[62968]: Skipping udev rule: 80-drivers-modprobe.rules Jan 30 16:04:19 managed-node1 dracut[62968]: *** Including module: dracut-systemd *** Jan 30 16:04:19 managed-node1 dracut[62968]: *** Including module: usrmount *** Jan 30 16:04:19 managed-node1 dracut[62968]: *** Including module: base *** Jan 30 16:04:19 managed-node1 dracut[62968]: *** Including module: fs-lib *** Jan 30 16:04:19 managed-node1 dracut[62968]: *** Including module: microcode_ctl-fw_dir_override *** Jan 30 16:04:19 managed-node1 dracut[62968]: microcode_ctl module: mangling fw_dir Jan 30 16:04:19 managed-node1 dracut[62968]: microcode_ctl: reset fw_dir to "/lib/firmware/updates /lib/firmware" Jan 30 16:04:19 managed-node1 dracut[62968]: 
microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel"... Jan 30 16:04:19 managed-node1 dracut[62968]: microcode_ctl: intel: caveats check for kernel version "5.14.0-654.el9.x86_64" passed, adding "/usr/share/microcode_ctl/ucode_with_caveats/intel" to fw_dir variable Jan 30 16:04:19 managed-node1 dracut[62968]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-2d-07"... Jan 30 16:04:19 managed-node1 dracut[62968]: microcode_ctl: configuration "intel-06-2d-07" is ignored Jan 30 16:04:19 managed-node1 dracut[62968]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-4e-03"... Jan 30 16:04:19 managed-node1 dracut[62968]: microcode_ctl: configuration "intel-06-4e-03" is ignored Jan 30 16:04:19 managed-node1 dracut[62968]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-4f-01"... Jan 30 16:04:19 managed-node1 dracut[62968]: microcode_ctl: configuration "intel-06-4f-01" is ignored Jan 30 16:04:19 managed-node1 dracut[62968]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-55-04"... Jan 30 16:04:19 managed-node1 dracut[62968]: microcode_ctl: configuration "intel-06-55-04" is ignored Jan 30 16:04:19 managed-node1 dracut[62968]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-5e-03"... Jan 30 16:04:19 managed-node1 dracut[62968]: microcode_ctl: configuration "intel-06-5e-03" is ignored Jan 30 16:04:19 managed-node1 dracut[62968]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-8c-01"... Jan 30 16:04:19 managed-node1 dracut[62968]: microcode_ctl: configuration "intel-06-8c-01" is ignored Jan 30 16:04:19 managed-node1 dracut[62968]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-8e-9e-0x-0xca"... Jan 30 16:04:19 managed-node1 dracut[62968]: microcode_ctl: configuration "intel-06-8e-9e-0x-0xca" is ignored Jan 30 16:04:19 managed-node1 dracut[62968]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-8e-9e-0x-dell"... Jan 30 16:04:19 managed-node1 dracut[62968]: microcode_ctl: configuration "intel-06-8e-9e-0x-dell" is ignored Jan 30 16:04:19 managed-node1 dracut[62968]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-8f-08"... 
Jan 30 16:04:19 managed-node1 dracut[62968]: microcode_ctl: configuration "intel-06-8f-08" is ignored Jan 30 16:04:19 managed-node1 dracut[62968]: microcode_ctl: final fw_dir: "/usr/share/microcode_ctl/ucode_with_caveats/intel /lib/firmware/updates /lib/firmware" Jan 30 16:04:19 managed-node1 dracut[62968]: *** Including module: openssl *** Jan 30 16:04:19 managed-node1 dracut[62968]: *** Including module: shutdown *** Jan 30 16:04:19 managed-node1 dracut[62968]: *** Including modules done *** Jan 30 16:04:19 managed-node1 dracut[62968]: *** Installing kernel module dependencies *** Jan 30 16:04:20 managed-node1 dracut[62968]: *** Installing kernel module dependencies done *** Jan 30 16:04:20 managed-node1 dracut[62968]: *** Resolving executable dependencies *** Jan 30 16:04:21 managed-node1 dracut[62968]: *** Resolving executable dependencies done *** Jan 30 16:04:21 managed-node1 dracut[62968]: *** Generating early-microcode cpio image *** Jan 30 16:04:21 managed-node1 dracut[62968]: *** Constructing GenuineIntel.bin *** Jan 30 16:04:21 managed-node1 dracut[62968]: *** Constructing GenuineIntel.bin *** Jan 30 16:04:21 managed-node1 dracut[62968]: *** Store current command line parameters *** Jan 30 16:04:21 managed-node1 dracut[62968]: *** Creating image file '/boot/initramfs-5.14.0-654.el9.x86_64.tmp' *** Jan 30 16:04:21 managed-node1 dracut[62968]: *** Hardlinking files *** Jan 30 16:04:22 managed-node1 dracut[62968]: Mode: real Jan 30 16:04:22 managed-node1 dracut[62968]: Files: 1680 Jan 30 16:04:22 managed-node1 dracut[62968]: Linked: 11 files Jan 30 16:04:22 managed-node1 dracut[62968]: Compared: 0 xattrs Jan 30 16:04:22 managed-node1 dracut[62968]: Compared: 412 files Jan 30 16:04:22 managed-node1 dracut[62968]: Saved: 27.36 KiB Jan 30 16:04:22 managed-node1 dracut[62968]: Duration: 0.110170 seconds Jan 30 16:04:22 managed-node1 dracut[62968]: *** Hardlinking files done *** Jan 30 16:04:22 managed-node1 dracut[62968]: dracut: using auto-determined compression method 'pigz' Jan 30 16:04:30 managed-node1 dracut[62968]: Image: /var/tmp/dracut.wNEDFK/initramfs.img: 57M Jan 30 16:04:30 managed-node1 dracut[62968]: ======================================================================== Jan 30 16:04:30 managed-node1 dracut[62968]: Early CPIO image Jan 30 16:04:30 managed-node1 dracut[62968]: ======================================================================== Jan 30 16:04:30 managed-node1 dracut[62968]: drwxr-xr-x 3 root root 0 Aug 18 10:11 . 
Jan 30 16:04:30 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2 Aug 18 10:11 early_cpio Jan 30 16:04:30 managed-node1 dracut[62968]: drwxr-xr-x 3 root root 0 Aug 18 10:11 kernel Jan 30 16:04:30 managed-node1 dracut[62968]: drwxr-xr-x 3 root root 0 Aug 18 10:11 kernel/x86 Jan 30 16:04:30 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 kernel/x86/microcode Jan 30 16:04:30 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 38912 Aug 18 10:11 kernel/x86/microcode/GenuineIntel.bin Jan 30 16:04:30 managed-node1 dracut[62968]: ======================================================================== Jan 30 16:04:31 managed-node1 dracut[62968]: Version: Jan 30 16:04:31 managed-node1 dracut[62968]: Arguments: -f Jan 30 16:04:32 managed-node1 dracut[62968]: dracut modules: Jan 30 16:04:32 managed-node1 dracut[62968]: bash Jan 30 16:04:32 managed-node1 dracut[62968]: systemd Jan 30 16:04:32 managed-node1 dracut[62968]: fips Jan 30 16:04:32 managed-node1 dracut[62968]: systemd-initrd Jan 30 16:04:32 managed-node1 dracut[62968]: systemd-sysusers Jan 30 16:04:32 managed-node1 dracut[62968]: nss-softokn Jan 30 16:04:32 managed-node1 dracut[62968]: dbus-broker Jan 30 16:04:32 managed-node1 dracut[62968]: rngd Jan 30 16:04:32 managed-node1 dracut[62968]: dbus Jan 30 16:04:32 managed-node1 dracut[62968]: i18n Jan 30 16:04:32 managed-node1 dracut[62968]: network-manager Jan 30 16:04:32 managed-node1 dracut[62968]: network Jan 30 16:04:32 managed-node1 dracut[62968]: ifcfg Jan 30 16:04:32 managed-node1 dracut[62968]: prefixdevname Jan 30 16:04:32 managed-node1 dracut[62968]: kernel-modules Jan 30 16:04:32 managed-node1 dracut[62968]: kernel-modules-extra Jan 30 16:04:32 managed-node1 dracut[62968]: kernel-network-modules Jan 30 16:04:32 managed-node1 dracut[62968]: resume Jan 30 16:04:32 managed-node1 dracut[62968]: rootfs-block Jan 30 16:04:32 managed-node1 dracut[62968]: terminfo Jan 30 16:04:32 managed-node1 dracut[62968]: udev-rules Jan 30 16:04:32 managed-node1 dracut[62968]: dracut-systemd Jan 30 16:04:32 managed-node1 dracut[62968]: usrmount Jan 30 16:04:32 managed-node1 dracut[62968]: base Jan 30 16:04:32 managed-node1 dracut[62968]: fs-lib Jan 30 16:04:32 managed-node1 dracut[62968]: microcode_ctl-fw_dir_override Jan 30 16:04:32 managed-node1 dracut[62968]: openssl Jan 30 16:04:32 managed-node1 dracut[62968]: shutdown Jan 30 16:04:33 managed-node1 dracut[62968]: ======================================================================== Jan 30 16:04:33 managed-node1 dracut[62968]: drwxr-xr-x 12 root root 0 Aug 18 10:11 . 
Jan 30 16:04:33 managed-node1 dracut[62968]: crw-r--r-- 1 root root 5, 1 Aug 18 10:11 dev/console Jan 30 16:04:33 managed-node1 dracut[62968]: crw-r--r-- 1 root root 1, 11 Aug 18 10:11 dev/kmsg Jan 30 16:04:33 managed-node1 dracut[62968]: crw-r--r-- 1 root root 1, 3 Aug 18 10:11 dev/null Jan 30 16:04:33 managed-node1 dracut[62968]: crw-r--r-- 1 root root 1, 8 Aug 18 10:11 dev/random Jan 30 16:04:33 managed-node1 dracut[62968]: crw-r--r-- 1 root root 1, 9 Aug 18 10:11 dev/urandom Jan 30 16:04:33 managed-node1 dracut[62968]: lrwxrwxrwx 1 root root 7 Aug 18 10:11 bin -> usr/bin Jan 30 16:04:33 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 dev Jan 30 16:04:33 managed-node1 dracut[62968]: drwxr-xr-x 16 root root 0 Aug 18 10:11 etc Jan 30 16:04:33 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 etc/authselect Jan 30 16:04:33 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3012 Aug 18 10:11 etc/authselect/nsswitch.conf Jan 30 16:04:33 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 etc/cmdline.d Jan 30 16:04:33 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 etc/conf.d Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 124 Aug 18 10:11 etc/conf.d/systemd.conf Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 3 root root 0 Aug 18 10:11 etc/crypto-policies Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 etc/crypto-policies/back-ends Jan 30 16:04:34 managed-node1 dracut[62968]: lrwxrwxrwx 1 root root 57 Aug 18 10:11 etc/crypto-policies/back-ends/opensslcnf.config -> ../../../usr/share/crypto-policies/DEFAULT/opensslcnf.txt Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 7 root root 0 Aug 18 10:11 etc/dbus-1 Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 etc/dbus-1/interfaces Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 etc/dbus-1/services Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 838 Jun 12 2023 etc/dbus-1/session.conf Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 etc/dbus-1/session.d Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 etc/dbus-1/system-services Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 833 Jun 12 2023 etc/dbus-1/system.conf Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 etc/dbus-1/system.d Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 409 Jan 9 2020 etc/dbus-1/system.d/teamd.conf Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 etc/depmod.d Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 116 Aug 18 08:59 etc/depmod.d/dist.conf Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 58 Aug 18 10:11 etc/depmod.d/kvdo.conf Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 147 Aug 18 10:11 etc/fipsmodules Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 0 Aug 18 10:11 etc/fstab.empty Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 224 Aug 18 10:11 etc/group Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 14 Aug 18 10:11 etc/hostname Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 184 Aug 18 10:11 etc/hosts Jan 30 16:04:34 managed-node1 dracut[62968]: lrwxrwxrwx 1 root root 25 Aug 18 10:11 etc/initrd-release -> 
../usr/lib/initrd-release Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 5043 Aug 18 10:11 etc/ld.so.cache Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 28 Aug 2 2021 etc/ld.so.conf Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 etc/ld.so.conf.d Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 19 May 2 2025 etc/ld.so.conf.d/dyninst-x86_64.conf Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 17 Aug 18 10:11 etc/locale.conf Jan 30 16:04:34 managed-node1 dracut[62968]: -r--r--r-- 1 root root 33 Aug 18 10:11 etc/machine-id Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 etc/modprobe.d Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 387 Aug 18 10:11 etc/modprobe.d/fips.conf Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 158 Jul 1 2025 etc/modprobe.d/firewalld-sysctls.conf Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 747 Aug 18 10:11 etc/modprobe.d/lockd.conf Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 101 Jun 11 2025 etc/modprobe.d/nvdimm-security.conf Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 etc/modules-load.d Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 7 Aug 18 10:11 etc/modules-load.d/sunrpc.conf Jan 30 16:04:34 managed-node1 dracut[62968]: lrwxrwxrwx 1 root root 17 Aug 18 10:11 etc/mtab -> /proc/self/mounts Jan 30 16:04:34 managed-node1 dracut[62968]: lrwxrwxrwx 1 root root 24 Aug 18 10:11 etc/nsswitch.conf -> authselect/nsswitch.conf Jan 30 16:04:34 managed-node1 dracut[62968]: lrwxrwxrwx 1 root root 14 Aug 18 10:11 etc/os-release -> initrd-release Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 121 Aug 18 10:11 etc/passwd Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 3 root root 0 Aug 18 10:11 etc/pki Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 etc/pki/tls Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 4690 Aug 18 10:11 etc/pki/tls/openssl.cnf Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 etc/sysconfig Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 58 Aug 18 10:10 etc/sysconfig/rngd Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 449 Aug 18 10:11 etc/sysctl.conf Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 etc/sysctl.d Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 377 Aug 18 10:11 etc/sysctl.d/90-hpc-sysctl.conf Jan 30 16:04:34 managed-node1 dracut[62968]: lrwxrwxrwx 1 root root 14 Aug 18 10:11 etc/sysctl.d/99-sysctl.conf -> ../sysctl.conf Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 3 root root 0 Aug 18 10:11 etc/systemd Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1342 Aug 18 10:11 etc/systemd/journald.conf Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 8 root root 0 Aug 18 10:11 etc/systemd/system Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2082 Aug 18 10:11 etc/systemd/system.conf Jan 30 16:04:34 managed-node1 dracut[62968]: lrwxrwxrwx 1 root root 43 Aug 18 10:11 etc/systemd/system/dbus.service -> /usr/lib/systemd/system/dbus-broker.service Jan 30 16:04:34 managed-node1 dracut[62968]: lrwxrwxrwx 1 root root 37 Aug 18 10:11 etc/systemd/system/default.target -> 
/usr/lib/systemd/system/initrd.target Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 etc/systemd/system/emergency.target.wants Jan 30 16:04:34 managed-node1 dracut[62968]: lrwxrwxrwx 1 root root 54 Aug 18 10:11 etc/systemd/system/emergency.target.wants/systemd-vconsole-setup.service -> /usr/lib/systemd/system/systemd-vconsole-setup.service Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 etc/systemd/system/initrd.target.wants Jan 30 16:04:34 managed-node1 dracut[62968]: lrwxrwxrwx 1 root root 50 Aug 18 10:11 etc/systemd/system/initrd.target.wants/dracut-cmdline-ask.service -> /usr/lib/systemd/system/dracut-cmdline-ask.service Jan 30 16:04:34 managed-node1 dracut[62968]: lrwxrwxrwx 1 root root 46 Aug 18 10:11 etc/systemd/system/initrd.target.wants/dracut-cmdline.service -> /usr/lib/systemd/system/dracut-cmdline.service Jan 30 16:04:34 managed-node1 dracut[62968]: lrwxrwxrwx 1 root root 48 Aug 18 10:11 etc/systemd/system/initrd.target.wants/dracut-initqueue.service -> /usr/lib/systemd/system/dracut-initqueue.service Jan 30 16:04:34 managed-node1 dracut[62968]: lrwxrwxrwx 1 root root 44 Aug 18 10:11 etc/systemd/system/initrd.target.wants/dracut-mount.service -> /usr/lib/systemd/system/dracut-mount.service Jan 30 16:04:34 managed-node1 dracut[62968]: lrwxrwxrwx 1 root root 48 Aug 18 10:11 etc/systemd/system/initrd.target.wants/dracut-pre-mount.service -> /usr/lib/systemd/system/dracut-pre-mount.service Jan 30 16:04:34 managed-node1 dracut[62968]: lrwxrwxrwx 1 root root 48 Aug 18 10:11 etc/systemd/system/initrd.target.wants/dracut-pre-pivot.service -> /usr/lib/systemd/system/dracut-pre-pivot.service Jan 30 16:04:34 managed-node1 dracut[62968]: lrwxrwxrwx 1 root root 50 Aug 18 10:11 etc/systemd/system/initrd.target.wants/dracut-pre-trigger.service -> /usr/lib/systemd/system/dracut-pre-trigger.service Jan 30 16:04:34 managed-node1 dracut[62968]: lrwxrwxrwx 1 root root 47 Aug 18 10:11 etc/systemd/system/initrd.target.wants/dracut-pre-udev.service -> /usr/lib/systemd/system/dracut-pre-udev.service Jan 30 16:04:34 managed-node1 dracut[62968]: lrwxrwxrwx 1 root root 41 Aug 18 10:11 etc/systemd/system/initrd.target.wants/nm-initrd.service -> /usr/lib/systemd/system/nm-initrd.service Jan 30 16:04:34 managed-node1 dracut[62968]: lrwxrwxrwx 1 root root 53 Aug 18 10:11 etc/systemd/system/initrd.target.wants/nm-wait-online-initrd.service -> /usr/lib/systemd/system/nm-wait-online-initrd.service Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 etc/systemd/system/network-online.target.wants Jan 30 16:04:34 managed-node1 dracut[62968]: lrwxrwxrwx 1 root root 53 Aug 18 10:11 etc/systemd/system/network-online.target.wants/nm-wait-online-initrd.service -> /usr/lib/systemd/system/nm-wait-online-initrd.service Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 etc/systemd/system/rescue.target.wants Jan 30 16:04:34 managed-node1 dracut[62968]: lrwxrwxrwx 1 root root 54 Aug 18 10:11 etc/systemd/system/rescue.target.wants/systemd-vconsole-setup.service -> /usr/lib/systemd/system/systemd-vconsole-setup.service Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 etc/systemd/system/sysinit.target.wants Jan 30 16:04:34 managed-node1 dracut[62968]: lrwxrwxrwx 1 root root 36 Aug 18 10:11 etc/systemd/system/sysinit.target.wants/rngd.service -> /usr/lib/systemd/system/rngd.service Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 
etc/systemd/system/systemd-ask-password-console.service.wants Jan 30 16:04:34 managed-node1 dracut[62968]: lrwxrwxrwx 1 root root 54 Aug 18 10:11 etc/systemd/system/systemd-ask-password-console.service.wants/systemd-vconsole-setup.service -> /usr/lib/systemd/system/systemd-vconsole-setup.service Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 3 root root 0 Aug 18 10:11 etc/udev Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 etc/udev/rules.d Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-rw-rw- 1 root root 0 Aug 18 10:11 etc/udev/rules.d/.null Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 297 Aug 18 10:11 etc/udev/rules.d/59-persistent-storage.rules Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1030 Aug 18 10:11 etc/udev/rules.d/61-persistent-storage.rules Jan 30 16:04:34 managed-node1 dracut[62968]: lrwxrwxrwx 1 root root 5 Aug 18 10:11 etc/udev/rules.d/70-persistent-net.rules -> .null Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 305 Oct 31 2022 etc/udev/udev.conf Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 28 Aug 18 10:11 etc/vconsole.conf Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1184 Aug 18 10:11 etc/virc Jan 30 16:04:34 managed-node1 dracut[62968]: lrwxrwxrwx 1 root root 23 Aug 18 10:11 init -> usr/lib/systemd/systemd Jan 30 16:04:34 managed-node1 dracut[62968]: lrwxrwxrwx 1 root root 7 Aug 18 10:11 lib -> usr/lib Jan 30 16:04:34 managed-node1 dracut[62968]: lrwxrwxrwx 1 root root 9 Aug 18 10:11 lib64 -> usr/lib64 Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 proc Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 root Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 run Jan 30 16:04:34 managed-node1 dracut[62968]: lrwxrwxrwx 1 root root 8 Aug 18 10:11 sbin -> usr/sbin Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 4631 Jun 19 2022 shutdown Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 sys Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 sysroot Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 tmp Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 8 root root 0 Aug 18 10:11 usr Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 usr/bin Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 27896 Aug 18 10:11 usr/bin/arping Jan 30 16:04:34 managed-node1 dracut[62968]: lrwxrwxrwx 1 root root 4 Aug 18 10:11 usr/bin/awk -> gawk Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 1389072 Feb 15 2024 usr/bin/bash Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 102544 Aug 18 10:11 usr/bin/busctl Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 36496 Dec 9 2024 usr/bin/cat Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 57496 Dec 9 2024 usr/bin/chmod Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 61632 Dec 9 2024 usr/bin/chown Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 152808 Dec 9 2024 usr/bin/cp Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 48936 Dec 9 2024 usr/bin/cut Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 236336 Aug 23 2022 usr/bin/dbus-broker Jan 30 16:04:34 managed-node1 dracut[62968]: 
-rwxr-xr-x 1 root root 131896 Aug 23 2022 usr/bin/dbus-broker-launch Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 73672 Jan 16 2025 usr/bin/dmesg Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 2250 Jun 19 2022 usr/bin/dracut-cmdline Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 431 Jun 19 2022 usr/bin/dracut-cmdline-ask Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 1638 Aug 18 10:10 usr/bin/dracut-emergency Jan 30 16:04:34 managed-node1 dracut[62968]: lrwxrwxrwx 1 root root 11 Aug 18 10:11 usr/bin/dracut-getarg -> dracut-util Jan 30 16:04:34 managed-node1 dracut[62968]: lrwxrwxrwx 1 root root 11 Aug 18 10:11 usr/bin/dracut-getargs -> dracut-util Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 2678 Jun 19 2022 usr/bin/dracut-initqueue Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 1095 Jun 19 2022 usr/bin/dracut-mount Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 508 Jun 19 2022 usr/bin/dracut-pre-mount Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 903 Jun 19 2022 usr/bin/dracut-pre-pivot Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 474 Jun 19 2022 usr/bin/dracut-pre-trigger Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 1415 Jun 19 2022 usr/bin/dracut-pre-udev Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 15512 Aug 18 10:11 usr/bin/dracut-util Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 36312 Dec 9 2024 usr/bin/echo Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 291760 Jul 4 2024 usr/bin/find Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 66472 Jan 16 2025 usr/bin/findmnt Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 24104 Jan 16 2025 usr/bin/flock Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 714976 Feb 16 2022 usr/bin/gawk Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 158176 Aug 9 2021 usr/bin/grep Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 91792 Apr 21 2022 usr/bin/gzip Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 90208 Aug 18 10:11 usr/bin/journalctl Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 16064 Jan 9 2025 usr/bin/kbd_mode Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 169544 Aug 18 08:59 usr/bin/kmod Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 202432 Jul 2 2025 usr/bin/less Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 61592 Dec 9 2024 usr/bin/ln Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 205336 Jan 9 2025 usr/bin/loadkeys Jan 30 16:04:34 managed-node1 dracut[62968]: lrwxrwxrwx 1 root root 4 Aug 18 10:11 usr/bin/loginctl -> true Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 140928 Dec 9 2024 usr/bin/ls Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 69936 Dec 9 2024 usr/bin/mkdir Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 40776 Dec 9 2024 usr/bin/mkfifo Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 44912 Dec 9 2024 usr/bin/mknod Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 48648 Jan 16 2025 usr/bin/mount Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 144536 Dec 9 2024 usr/bin/mv Jan 30 16:04:34 managed-node1 dracut[62968]: 
-rwxr-xr-x 1 root root 23936 Aug 18 10:11 usr/bin/nm-online Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 1079672 Aug 18 10:11 usr/bin/nmcli Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 32256 Jan 23 2024 usr/bin/pgrep Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 144632 Jan 23 2024 usr/bin/ps Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 40624 Dec 9 2024 usr/bin/readlink Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 61640 Dec 9 2024 usr/bin/rm Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 116728 Aug 10 2021 usr/bin/sed Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 53064 Jan 9 2025 usr/bin/setfont Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 15696 Jan 16 2025 usr/bin/setsid Jan 30 16:04:34 managed-node1 dracut[62968]: lrwxrwxrwx 1 root root 4 Aug 18 10:11 usr/bin/sh -> bash Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 36296 Dec 7 2023 usr/bin/sha512hmac Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 36496 Dec 9 2024 usr/bin/sleep Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 115800 Dec 9 2024 usr/bin/sort Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 81968 Dec 9 2024 usr/bin/stat Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 77632 Dec 9 2024 usr/bin/stty Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 305568 Aug 18 10:11 usr/bin/systemctl Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 19920 Aug 18 10:11 usr/bin/systemd-ask-password Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 24024 Aug 18 10:11 usr/bin/systemd-cgls Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 23808 Aug 18 10:11 usr/bin/systemd-escape Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 65544 Aug 18 10:11 usr/bin/systemd-run Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 74088 Aug 18 10:11 usr/bin/systemd-sysusers Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 127512 Aug 18 10:11 usr/bin/systemd-tmpfiles Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 40296 Aug 18 10:11 usr/bin/systemd-tty-ask-password-agent Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 69712 Dec 9 2024 usr/bin/tail Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 170592 Dec 7 2022 usr/bin/teamd Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 41120 Dec 9 2024 usr/bin/timeout Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 49000 Dec 9 2024 usr/bin/tr Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 28112 Dec 9 2024 usr/bin/true Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 601944 Aug 18 10:11 usr/bin/udevadm Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 36280 Jan 16 2025 usr/bin/umount Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 32416 Dec 9 2024 usr/bin/uname Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 691 Aug 18 10:11 usr/bin/vi Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 13 root root 0 Aug 18 10:11 usr/lib Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 3 root root 0 Aug 18 10:11 usr/lib/NetworkManager Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 
usr/lib/NetworkManager/conf.d Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 54 Jun 19 2022 usr/lib/NetworkManager/conf.d/initrd-no-auto-default.conf Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 3 root root 0 Aug 18 10:11 usr/lib/dracut Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 4069 Jun 19 2022 usr/lib/dracut-dev-lib.sh Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 28857 Jun 19 2022 usr/lib/dracut-lib.sh Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 4 Aug 18 10:11 usr/lib/dracut/build-parameter.txt Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 31 Aug 18 10:11 usr/lib/dracut/dracut-057-102.git20250818.el9 Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 15 root root 0 Aug 18 10:11 usr/lib/dracut/hooks Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 usr/lib/dracut/hooks/cleanup Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 usr/lib/dracut/hooks/cmdline Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 918 Jun 19 2022 usr/lib/dracut/hooks/cmdline/91-dhcp-root.sh Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 1083 Jun 19 2022 usr/lib/dracut/hooks/cmdline/99-nm-config.sh Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 usr/lib/dracut/hooks/emergency Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 6 root root 0 Aug 18 10:11 usr/lib/dracut/hooks/initqueue Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 usr/lib/dracut/hooks/initqueue/finished Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 usr/lib/dracut/hooks/initqueue/online Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 usr/lib/dracut/hooks/initqueue/settled Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 2232 Aug 18 10:10 usr/lib/dracut/hooks/initqueue/settled/99-nm-run.sh Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 usr/lib/dracut/hooks/initqueue/timeout Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 463 Jun 19 2022 usr/lib/dracut/hooks/initqueue/timeout/99-rootfallback.sh Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 usr/lib/dracut/hooks/mount Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 usr/lib/dracut/hooks/netroot Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 usr/lib/dracut/hooks/pre-mount Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 usr/lib/dracut/hooks/pre-pivot Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 490 Aug 18 10:10 usr/lib/dracut/hooks/pre-pivot/00-fips-boot.sh Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 472 Aug 18 10:10 usr/lib/dracut/hooks/pre-pivot/01-fips-noboot.sh Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 9387 Jun 19 2022 usr/lib/dracut/hooks/pre-pivot/85-write-ifcfg.sh Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 usr/lib/dracut/hooks/pre-shutdown Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 usr/lib/dracut/hooks/pre-trigger Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 usr/lib/dracut/hooks/pre-udev Jan 30 16:04:34 managed-node1 
dracut[62968]: -rwxr--r-- 1 root root 443 Aug 18 10:10 usr/lib/dracut/hooks/pre-udev/01-fips-load-crypto.sh Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 1148 Jun 19 2022 usr/lib/dracut/hooks/pre-udev/50-ifname-genrules.sh Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 usr/lib/dracut/hooks/shutdown Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 usr/lib/dracut/hooks/shutdown-emergency Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 473 Aug 18 10:11 usr/lib/dracut/hostonly-files Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 315 Aug 18 10:11 usr/lib/dracut/modules.txt Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 0 Aug 18 10:11 usr/lib/dracut/need-initqueue Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 3 root root 0 Aug 18 10:11 usr/lib/firmware Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 3 root root 0 Aug 18 10:11 usr/lib/firmware/intel Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 usr/lib/firmware/intel/ish Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 406296 Aug 18 10:11 usr/lib/firmware/intel/ish/ish_lnlm.bin.xz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1222660 Aug 18 10:11 usr/lib/firmware/phanfw.bin.xz Jan 30 16:04:34 managed-node1 dracut[62968]: -rwxr-xr-x 1 root root 6695 Jun 19 2022 usr/lib/fs-lib.sh Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 504 Aug 18 10:11 usr/lib/initrd-release Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 6 root root 0 Aug 18 10:11 usr/lib/kbd Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 3 root root 0 Aug 18 10:11 usr/lib/kbd/consolefonts Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3019 Jan 9 2025 usr/lib/kbd/consolefonts/161.cp.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3086 Jan 9 2025 usr/lib/kbd/consolefonts/162.cp.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3069 Jan 9 2025 usr/lib/kbd/consolefonts/163.cp.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3136 Jan 9 2025 usr/lib/kbd/consolefonts/164.cp.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3247 Jan 9 2025 usr/lib/kbd/consolefonts/165.cp.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2948 Jan 9 2025 usr/lib/kbd/consolefonts/737.cp.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2914 Jan 9 2025 usr/lib/kbd/consolefonts/880.cp.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2759 Jan 9 2025 usr/lib/kbd/consolefonts/928.cp.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2159 Jan 9 2025 usr/lib/kbd/consolefonts/972.cp.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3791 Jan 9 2025 usr/lib/kbd/consolefonts/Agafari-12.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3839 Jan 9 2025 usr/lib/kbd/consolefonts/Agafari-14.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3904 Jan 9 2025 usr/lib/kbd/consolefonts/Agafari-16.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1985 Jan 9 2025 usr/lib/kbd/consolefonts/Cyr_a8x14.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2020 Jan 9 2025 usr/lib/kbd/consolefonts/Cyr_a8x16.psfu.gz Jan 30 16:04:34 managed-node1 
dracut[62968]: -rw-r--r-- 1 root root 1751 Jan 9 2025 usr/lib/kbd/consolefonts/Cyr_a8x8.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 191 Jan 9 2025 usr/lib/kbd/consolefonts/ERRORS Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3702 Jan 9 2025 usr/lib/kbd/consolefonts/Goha-12.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3715 Jan 9 2025 usr/lib/kbd/consolefonts/Goha-14.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3779 Jan 9 2025 usr/lib/kbd/consolefonts/Goha-16.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3698 Jan 9 2025 usr/lib/kbd/consolefonts/GohaClassic-12.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3724 Jan 9 2025 usr/lib/kbd/consolefonts/GohaClassic-14.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3789 Jan 9 2025 usr/lib/kbd/consolefonts/GohaClassic-16.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2022 Jan 9 2025 usr/lib/kbd/consolefonts/Lat2-Terminus16.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3584 Jan 9 2025 usr/lib/kbd/consolefonts/LatArCyrHeb-08.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 4089 Jan 9 2025 usr/lib/kbd/consolefonts/LatArCyrHeb-14.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 4184 Jan 9 2025 usr/lib/kbd/consolefonts/LatArCyrHeb-16+.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 4096 Jan 9 2025 usr/lib/kbd/consolefonts/LatArCyrHeb-16.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 4260 Jan 9 2025 usr/lib/kbd/consolefonts/LatArCyrHeb-19.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 6004 Jan 9 2025 usr/lib/kbd/consolefonts/LatGrkCyr-12x22.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3813 Jan 9 2025 usr/lib/kbd/consolefonts/LatGrkCyr-8x16.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 4113 Jan 9 2025 usr/lib/kbd/consolefonts/LatKaCyrHeb-14.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1387 Jan 9 2025 usr/lib/kbd/consolefonts/Mik_8x16.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2709 Jan 9 2025 usr/lib/kbd/consolefonts/UniCyrExt_8x16.psf.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1929 Jan 9 2025 usr/lib/kbd/consolefonts/UniCyr_8x14.psf.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1996 Jan 9 2025 usr/lib/kbd/consolefonts/UniCyr_8x16.psf.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1728 Jan 9 2025 usr/lib/kbd/consolefonts/UniCyr_8x8.psf.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1290 Jan 9 2025 usr/lib/kbd/consolefonts/alt-8x14.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1488 Jan 9 2025 usr/lib/kbd/consolefonts/alt-8x16.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1084 Jan 9 2025 usr/lib/kbd/consolefonts/alt-8x8.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1415 Jan 9 2025 usr/lib/kbd/consolefonts/altc-8x16.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1114 Jan 9 2025 usr/lib/kbd/consolefonts/aply16.psf.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1593 Jan 9 2025 usr/lib/kbd/consolefonts/arm8.fnt.gz Jan 30 16:04:34 
managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1877 Jan 9 2025 usr/lib/kbd/consolefonts/cp1250.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1963 Jan 9 2025 usr/lib/kbd/consolefonts/cp850-8x14.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2025 Jan 9 2025 usr/lib/kbd/consolefonts/cp850-8x16.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1760 Jan 9 2025 usr/lib/kbd/consolefonts/cp850-8x8.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1106 Jan 9 2025 usr/lib/kbd/consolefonts/cp857.08.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1282 Jan 9 2025 usr/lib/kbd/consolefonts/cp857.14.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1342 Jan 9 2025 usr/lib/kbd/consolefonts/cp857.16.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1989 Jan 9 2025 usr/lib/kbd/consolefonts/cp865-8x14.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2050 Jan 9 2025 usr/lib/kbd/consolefonts/cp865-8x16.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1768 Jan 9 2025 usr/lib/kbd/consolefonts/cp865-8x8.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1329 Jan 9 2025 usr/lib/kbd/consolefonts/cp866-8x14.psf.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1351 Jan 9 2025 usr/lib/kbd/consolefonts/cp866-8x16.psf.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1112 Jan 9 2025 usr/lib/kbd/consolefonts/cp866-8x8.psf.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1212 Jan 9 2025 usr/lib/kbd/consolefonts/cybercafe.fnt.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2070 Jan 9 2025 usr/lib/kbd/consolefonts/cyr-sun16.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2076 Jan 9 2025 usr/lib/kbd/consolefonts/default8x16.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1879 Jan 9 2025 usr/lib/kbd/consolefonts/default8x9.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3412 Jan 9 2025 usr/lib/kbd/consolefonts/drdos8x14.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3486 Jan 9 2025 usr/lib/kbd/consolefonts/drdos8x16.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2558 Jan 9 2025 usr/lib/kbd/consolefonts/drdos8x6.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3065 Jan 9 2025 usr/lib/kbd/consolefonts/drdos8x8.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 4126 Jan 9 2025 usr/lib/kbd/consolefonts/eurlatgr.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1707 Jan 9 2025 usr/lib/kbd/consolefonts/gr737a-8x8.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1915 Jan 9 2025 usr/lib/kbd/consolefonts/gr737a-9x14.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1975 Jan 9 2025 usr/lib/kbd/consolefonts/gr737a-9x16.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1819 Jan 9 2025 usr/lib/kbd/consolefonts/gr737b-8x11.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2137 Jan 9 2025 usr/lib/kbd/consolefonts/gr737b-9x16-medieval.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1932 Jan 9 2025 usr/lib/kbd/consolefonts/gr737c-8x14.psfu.gz Jan 30 
16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 0 Jan 9 2025 usr/lib/kbd/consolefonts/gr737c-8x16.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1516 Jan 9 2025 usr/lib/kbd/consolefonts/gr737c-8x6.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1563 Jan 9 2025 usr/lib/kbd/consolefonts/gr737c-8x7.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1743 Jan 9 2025 usr/lib/kbd/consolefonts/gr737c-8x8.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 1970 Jan 9 2025 usr/lib/kbd/consolefonts/gr737d-8x16.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1737 Jan 9 2025 usr/lib/kbd/consolefonts/gr928-8x16-thin.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1774 Jan 9 2025 usr/lib/kbd/consolefonts/gr928-9x14.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1809 Jan 9 2025 usr/lib/kbd/consolefonts/gr928-9x16.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1784 Jan 9 2025 usr/lib/kbd/consolefonts/gr928a-8x14.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1824 Jan 9 2025 usr/lib/kbd/consolefonts/gr928a-8x16.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2057 Jan 9 2025 usr/lib/kbd/consolefonts/gr928b-8x14.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2100 Jan 9 2025 usr/lib/kbd/consolefonts/gr928b-8x16.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3162 Jan 9 2025 usr/lib/kbd/consolefonts/greek-polytonic.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 0 Jan 9 2025 usr/lib/kbd/consolefonts/iso01-12x22.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 85 Jan 9 2025 usr/lib/kbd/consolefonts/iso01.08.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 85 Jan 9 2025 usr/lib/kbd/consolefonts/iso01.14.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 85 Jan 9 2025 usr/lib/kbd/consolefonts/iso01.16.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 3211 Jan 9 2025 usr/lib/kbd/consolefonts/iso02-12x22.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 85 Jan 9 2025 usr/lib/kbd/consolefonts/iso02.08.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 85 Jan 9 2025 usr/lib/kbd/consolefonts/iso02.14.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 85 Jan 9 2025 usr/lib/kbd/consolefonts/iso02.16.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 85 Jan 9 2025 usr/lib/kbd/consolefonts/iso03.08.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 85 Jan 9 2025 usr/lib/kbd/consolefonts/iso03.14.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 85 Jan 9 2025 usr/lib/kbd/consolefonts/iso03.16.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 85 Jan 9 2025 usr/lib/kbd/consolefonts/iso04.08.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 85 Jan 9 2025 usr/lib/kbd/consolefonts/iso04.14.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 85 Jan 9 2025 usr/lib/kbd/consolefonts/iso04.16.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 85 Jan 9 2025 usr/lib/kbd/consolefonts/iso05.08.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 85 Jan 9 2025 
usr/lib/kbd/consolefonts/iso05.14.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 85 Jan 9 2025 usr/lib/kbd/consolefonts/iso05.16.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 85 Jan 9 2025 usr/lib/kbd/consolefonts/iso06.08.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 85 Jan 9 2025 usr/lib/kbd/consolefonts/iso06.14.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 85 Jan 9 2025 usr/lib/kbd/consolefonts/iso06.16.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 85 Jan 9 2025 usr/lib/kbd/consolefonts/iso07.14.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 85 Jan 9 2025 usr/lib/kbd/consolefonts/iso07.16.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2082 Jan 9 2025 usr/lib/kbd/consolefonts/iso07u-16.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 85 Jan 9 2025 usr/lib/kbd/consolefonts/iso08.08.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 85 Jan 9 2025 usr/lib/kbd/consolefonts/iso08.14.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 85 Jan 9 2025 usr/lib/kbd/consolefonts/iso08.16.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 85 Jan 9 2025 usr/lib/kbd/consolefonts/iso09.08.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 85 Jan 9 2025 usr/lib/kbd/consolefonts/iso09.14.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 85 Jan 9 2025 usr/lib/kbd/consolefonts/iso09.16.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 86 Jan 9 2025 usr/lib/kbd/consolefonts/iso10.08.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 86 Jan 9 2025 usr/lib/kbd/consolefonts/iso10.14.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 86 Jan 9 2025 usr/lib/kbd/consolefonts/iso10.16.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1261 Jan 9 2025 usr/lib/kbd/consolefonts/koi8-14.psf.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1412 Jan 9 2025 usr/lib/kbd/consolefonts/koi8c-8x16.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1299 Jan 9 2025 usr/lib/kbd/consolefonts/koi8r-8x14.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1493 Jan 9 2025 usr/lib/kbd/consolefonts/koi8r-8x16.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1090 Jan 9 2025 usr/lib/kbd/consolefonts/koi8r-8x8.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1768 Jan 9 2025 usr/lib/kbd/consolefonts/koi8r.8x8.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2090 Jan 9 2025 usr/lib/kbd/consolefonts/koi8u_8x14.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2111 Jan 9 2025 usr/lib/kbd/consolefonts/koi8u_8x16.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1835 Jan 9 2025 usr/lib/kbd/consolefonts/koi8u_8x8.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1790 Jan 9 2025 usr/lib/kbd/consolefonts/lat0-08.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1868 Jan 9 2025 usr/lib/kbd/consolefonts/lat0-10.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1978 Jan 9 2025 usr/lib/kbd/consolefonts/lat0-12.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2011 Jan 9 2025 
usr/lib/kbd/consolefonts/lat0-14.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2063 Jan 9 2025 usr/lib/kbd/consolefonts/lat0-16.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1989 Jan 9 2025 usr/lib/kbd/consolefonts/lat0-sun16.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1779 Jan 9 2025 usr/lib/kbd/consolefonts/lat1-08.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1861 Jan 9 2025 usr/lib/kbd/consolefonts/lat1-10.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1964 Jan 9 2025 usr/lib/kbd/consolefonts/lat1-12.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1998 Jan 9 2025 usr/lib/kbd/consolefonts/lat1-14.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2076 Jan 9 2025 usr/lib/kbd/consolefonts/lat1-16.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1748 Jan 9 2025 usr/lib/kbd/consolefonts/lat2-08.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1868 Jan 9 2025 usr/lib/kbd/consolefonts/lat2-10.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1910 Jan 9 2025 usr/lib/kbd/consolefonts/lat2-12.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1982 Jan 9 2025 usr/lib/kbd/consolefonts/lat2-14.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1991 Jan 9 2025 usr/lib/kbd/consolefonts/lat2-16.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1984 Jan 9 2025 usr/lib/kbd/consolefonts/lat2-sun16.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2018 Jan 9 2025 usr/lib/kbd/consolefonts/lat2a-16.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1821 Jan 9 2025 usr/lib/kbd/consolefonts/lat4-08.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1925 Jan 9 2025 usr/lib/kbd/consolefonts/lat4-10.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1995 Jan 9 2025 usr/lib/kbd/consolefonts/lat4-12.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2046 Jan 9 2025 usr/lib/kbd/consolefonts/lat4-14.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2151 Jan 9 2025 usr/lib/kbd/consolefonts/lat4-16+.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2057 Jan 9 2025 usr/lib/kbd/consolefonts/lat4-16.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2197 Jan 9 2025 usr/lib/kbd/consolefonts/lat4-19.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1820 Jan 9 2025 usr/lib/kbd/consolefonts/lat4a-08.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1927 Jan 9 2025 usr/lib/kbd/consolefonts/lat4a-10.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1995 Jan 9 2025 usr/lib/kbd/consolefonts/lat4a-12.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2048 Jan 9 2025 usr/lib/kbd/consolefonts/lat4a-14.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2152 Jan 9 2025 usr/lib/kbd/consolefonts/lat4a-16+.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2054 Jan 9 2025 usr/lib/kbd/consolefonts/lat4a-16.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2193 Jan 9 2025 
usr/lib/kbd/consolefonts/lat4a-19.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1965 Jan 9 2025 usr/lib/kbd/consolefonts/lat5-12.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1995 Jan 9 2025 usr/lib/kbd/consolefonts/lat5-14.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2061 Jan 9 2025 usr/lib/kbd/consolefonts/lat5-16.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1838 Jan 9 2025 usr/lib/kbd/consolefonts/lat7-14.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2083 Jan 9 2025 usr/lib/kbd/consolefonts/lat7a-14.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1372 Jan 9 2025 usr/lib/kbd/consolefonts/lat7a-16.psf.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1099 Jan 9 2025 usr/lib/kbd/consolefonts/lat9-08.psf.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1175 Jan 9 2025 usr/lib/kbd/consolefonts/lat9-10.psf.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1290 Jan 9 2025 usr/lib/kbd/consolefonts/lat9-12.psf.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1308 Jan 9 2025 usr/lib/kbd/consolefonts/lat9-14.psf.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1364 Jan 9 2025 usr/lib/kbd/consolefonts/lat9-16.psf.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1783 Jan 9 2025 usr/lib/kbd/consolefonts/lat9u-08.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1865 Jan 9 2025 usr/lib/kbd/consolefonts/lat9u-10.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1976 Jan 9 2025 usr/lib/kbd/consolefonts/lat9u-12.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2009 Jan 9 2025 usr/lib/kbd/consolefonts/lat9u-14.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2061 Jan 9 2025 usr/lib/kbd/consolefonts/lat9u-16.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1771 Jan 9 2025 usr/lib/kbd/consolefonts/lat9v-08.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1853 Jan 9 2025 usr/lib/kbd/consolefonts/lat9v-10.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1973 Jan 9 2025 usr/lib/kbd/consolefonts/lat9v-12.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1999 Jan 9 2025 usr/lib/kbd/consolefonts/lat9v-14.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2054 Jan 9 2025 usr/lib/kbd/consolefonts/lat9v-16.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1787 Jan 9 2025 usr/lib/kbd/consolefonts/lat9w-08.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1870 Jan 9 2025 usr/lib/kbd/consolefonts/lat9w-10.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1989 Jan 9 2025 usr/lib/kbd/consolefonts/lat9w-12.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2015 Jan 9 2025 usr/lib/kbd/consolefonts/lat9w-14.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2069 Jan 9 2025 usr/lib/kbd/consolefonts/lat9w-16.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3803 Jan 9 2025 usr/lib/kbd/consolefonts/latarcyrheb-sun16.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 5171 Jan 9 2025 
usr/lib/kbd/consolefonts/latarcyrheb-sun32.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3412 Jan 9 2025 usr/lib/kbd/consolefonts/pancyrillic.f16.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 usr/lib/kbd/consolefonts/partialfonts Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 693 Jan 9 2025 usr/lib/kbd/consolefonts/partialfonts/8859-1.a0-ff.08.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 805 Jan 9 2025 usr/lib/kbd/consolefonts/partialfonts/8859-1.a0-ff.14.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 812 Jan 9 2025 usr/lib/kbd/consolefonts/partialfonts/8859-1.a0-ff.16.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 710 Jan 9 2025 usr/lib/kbd/consolefonts/partialfonts/8859-10.a0-ff.08.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 818 Jan 9 2025 usr/lib/kbd/consolefonts/partialfonts/8859-10.a0-ff.14.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 825 Jan 9 2025 usr/lib/kbd/consolefonts/partialfonts/8859-10.a0-ff.16.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 713 Jan 9 2025 usr/lib/kbd/consolefonts/partialfonts/8859-2.a0-ff.08.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 803 Jan 9 2025 usr/lib/kbd/consolefonts/partialfonts/8859-2.a0-ff.14.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 800 Jan 9 2025 usr/lib/kbd/consolefonts/partialfonts/8859-2.a0-ff.16.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 659 Jan 9 2025 usr/lib/kbd/consolefonts/partialfonts/8859-3.a0-ff.08.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 744 Jan 9 2025 usr/lib/kbd/consolefonts/partialfonts/8859-3.a0-ff.14.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 752 Jan 9 2025 usr/lib/kbd/consolefonts/partialfonts/8859-3.a0-ff.16.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 707 Jan 9 2025 usr/lib/kbd/consolefonts/partialfonts/8859-4.a0-ff.08.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 819 Jan 9 2025 usr/lib/kbd/consolefonts/partialfonts/8859-4.a0-ff.14.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 822 Jan 9 2025 usr/lib/kbd/consolefonts/partialfonts/8859-4.a0-ff.16.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 709 Jan 9 2025 usr/lib/kbd/consolefonts/partialfonts/8859-5.a0-ff.08.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 800 Jan 9 2025 usr/lib/kbd/consolefonts/partialfonts/8859-5.a0-ff.14.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 825 Jan 9 2025 usr/lib/kbd/consolefonts/partialfonts/8859-5.a0-ff.16.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 436 Jan 9 2025 usr/lib/kbd/consolefonts/partialfonts/8859-6.a0-ff.08.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 498 Jan 9 2025 usr/lib/kbd/consolefonts/partialfonts/8859-6.a0-ff.14.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 499 Jan 9 2025 usr/lib/kbd/consolefonts/partialfonts/8859-6.a0-ff.16.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 648 Jan 9 2025 usr/lib/kbd/consolefonts/partialfonts/8859-7.a0-ff.08.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 803 Jan 9 2025 usr/lib/kbd/consolefonts/partialfonts/8859-7.a0-ff.14.gz Jan 30 16:04:34 
managed-node1 dracut[62968]: -rw-r--r-- 1 root root 818 Jan 9 2025 usr/lib/kbd/consolefonts/partialfonts/8859-7.a0-ff.16.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 501 Jan 9 2025 usr/lib/kbd/consolefonts/partialfonts/8859-8.a0-ff.08.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 565 Jan 9 2025 usr/lib/kbd/consolefonts/partialfonts/8859-8.a0-ff.14.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 576 Jan 9 2025 usr/lib/kbd/consolefonts/partialfonts/8859-8.a0-ff.16.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 699 Jan 9 2025 usr/lib/kbd/consolefonts/partialfonts/8859-9.a0-ff.08.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 791 Jan 9 2025 usr/lib/kbd/consolefonts/partialfonts/8859-9.a0-ff.14.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 814 Jan 9 2025 usr/lib/kbd/consolefonts/partialfonts/8859-9.a0-ff.16.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 650 Jan 9 2025 usr/lib/kbd/consolefonts/partialfonts/ascii.20-7f.08.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 749 Jan 9 2025 usr/lib/kbd/consolefonts/partialfonts/ascii.20-7f.14.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 778 Jan 9 2025 usr/lib/kbd/consolefonts/partialfonts/ascii.20-7f.16.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 330 Jan 9 2025 usr/lib/kbd/consolefonts/partialfonts/cp437.00-1f.08.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 371 Jan 9 2025 usr/lib/kbd/consolefonts/partialfonts/cp437.00-1f.14.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 380 Jan 9 2025 usr/lib/kbd/consolefonts/partialfonts/cp437.00-1f.16.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 36 Jan 9 2025 usr/lib/kbd/consolefonts/partialfonts/none.00-17.08.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 40 Jan 9 2025 usr/lib/kbd/consolefonts/partialfonts/none.00-17.14.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 40 Jan 9 2025 usr/lib/kbd/consolefonts/partialfonts/none.00-17.16.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2084 Jan 9 2025 usr/lib/kbd/consolefonts/ruscii_8x16.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2059 Jan 9 2025 usr/lib/kbd/consolefonts/ruscii_8x8.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3928 Jan 9 2025 usr/lib/kbd/consolefonts/solar24x32.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3287 Jan 9 2025 usr/lib/kbd/consolefonts/sun12x22.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1467 Jan 9 2025 usr/lib/kbd/consolefonts/t.fnt.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1515 Jan 9 2025 usr/lib/kbd/consolefonts/t850b.fnt.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1363 Jan 9 2025 usr/lib/kbd/consolefonts/tcvn8x16.psf.gz Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1891 Jan 9 2025 usr/lib/kbd/consolefonts/viscii10-8x16.psfu.gz Jan 30 16:04:34 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 usr/lib/kbd/consoletrans Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 9216 Jan 9 2025 usr/lib/kbd/consoletrans/8859-10_to_uni.trans Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 8994 Jan 9 2025 
usr/lib/kbd/consoletrans/8859-13_to_uni.trans
Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 9279 Jan 9 2025 usr/lib/kbd/consoletrans/8859-14_to_uni.trans
Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 8839 Jan 9 2025 usr/lib/kbd/consoletrans/8859-15_to_uni.trans
Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 8806 Jan 9 2025 usr/lib/kbd/consoletrans/8859-1_to_uni.trans
Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 9034 Jan 9 2025 usr/lib/kbd/consoletrans/8859-2_to_uni.trans
Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 8712 Jan 9 2025 usr/lib/kbd/consoletrans/8859-3_to_uni.trans
Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 9006 Jan 9 2025 usr/lib/kbd/consoletrans/8859-4_to_uni.trans
Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 8645 Jan 9 2025 usr/lib/kbd/consoletrans/8859-5_to_uni.trans
Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 6418 Jan 9 2025 usr/lib/kbd/consoletrans/8859-6_to_uni.trans
Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 8471 Jan 9 2025 usr/lib/kbd/consoletrans/8859-7_to_uni.trans
Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 6566 Jan 9 2025 usr/lib/kbd/consoletrans/8859-8_to_uni.trans
Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 8786 Jan 9 2025 usr/lib/kbd/consoletrans/8859-9_to_uni.trans
Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1698 Jan 9 2025 usr/lib/kbd/consoletrans/baltic.trans
Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 9307 Jan 9 2025 usr/lib/kbd/consoletrans/cp1250_to_uni.trans
Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 9769 Jan 9 2025 usr/lib/kbd/consoletrans/cp1251_to_uni.trans
Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1834 Jan 9 2025 usr/lib/kbd/consoletrans/cp437_to_iso01.trans
Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 9529 Jan 9 2025 usr/lib/kbd/consoletrans/cp437_to_uni.trans
Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 9568 Jan 9 2025 usr/lib/kbd/consoletrans/cp737_to_uni.trans
Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 9500 Jan 9 2025 usr/lib/kbd/consoletrans/cp775_to_uni.trans
Jan 30 16:04:34 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1172 Jan 9 2025 usr/lib/kbd/consoletrans/cp850_to_iso01.trans
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 9378 Jan 9 2025 usr/lib/kbd/consoletrans/cp850_to_uni.trans
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 9677 Jan 9 2025 usr/lib/kbd/consoletrans/cp852_to_uni.trans
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 9562 Jan 9 2025 usr/lib/kbd/consoletrans/cp853_to_uni.trans
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 9293 Jan 9 2025 usr/lib/kbd/consoletrans/cp855_to_uni.trans
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 9369 Jan 9 2025 usr/lib/kbd/consoletrans/cp857_to_uni.trans
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 9570 Jan 9 2025 usr/lib/kbd/consoletrans/cp860_to_uni.trans
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 9554 Jan 9 2025 usr/lib/kbd/consoletrans/cp861_to_uni.trans
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 9133 Jan 9 2025 usr/lib/kbd/consoletrans/cp862_to_uni.trans
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 9427 Jan 9 2025 usr/lib/kbd/consoletrans/cp863_to_uni.trans
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 9333 Jan 9 2025 usr/lib/kbd/consoletrans/cp864_to_uni.trans
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 9549 Jan 9 2025 usr/lib/kbd/consoletrans/cp865_to_uni.trans
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 9475 Jan 9 2025 usr/lib/kbd/consoletrans/cp866_to_uni.trans
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 9109 Jan 9 2025 usr/lib/kbd/consoletrans/cp869_to_uni.trans
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 8219 Jan 9 2025 usr/lib/kbd/consoletrans/cp874_to_uni.trans
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 5914 Jan 9 2025 usr/lib/kbd/consoletrans/iso02_to_cp1250.trans
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2455 Jan 9 2025 usr/lib/kbd/consoletrans/koi2alt
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 10105 Jan 9 2025 usr/lib/kbd/consoletrans/koi8-r_to_uni.trans
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 10067 Jan 9 2025 usr/lib/kbd/consoletrans/koi8-u_to_uni.trans
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1468 Jan 9 2025 usr/lib/kbd/consoletrans/koi8u2ruscii
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 256 Jan 9 2025 usr/lib/kbd/consoletrans/latin2u.trans
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 923 Jan 9 2025 usr/lib/kbd/consoletrans/null
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2122 Jan 9 2025 usr/lib/kbd/consoletrans/space
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1891 Jan 9 2025 usr/lib/kbd/consoletrans/trivial
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1698 Jan 9 2025 usr/lib/kbd/consoletrans/vga2iso
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1418 Jan 9 2025 usr/lib/kbd/consoletrans/viscii1.0_to_tcvn.trans
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1847 Jan 9 2025 usr/lib/kbd/consoletrans/viscii1.0_to_viscii1.1.trans
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1747 Jan 9 2025 usr/lib/kbd/consoletrans/zero
Jan 30 16:04:35 managed-node1 dracut[62968]: drwxr-xr-x 4 root root 0 Aug 18 10:11 usr/lib/kbd/keymaps
Jan 30 16:04:35 managed-node1 dracut[62968]: drwxr-xr-x 9 root root 0 Aug 18 10:11 usr/lib/kbd/keymaps/legacy
Jan 30 16:04:35 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 usr/lib/kbd/keymaps/legacy/amiga
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2970 Jan 9 2025 usr/lib/kbd/keymaps/legacy/amiga/amiga-de.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1889 Jan 9 2025 usr/lib/kbd/keymaps/legacy/amiga/amiga-us.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 usr/lib/kbd/keymaps/legacy/atari
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2741 Jan 9 2025 usr/lib/kbd/keymaps/legacy/atari/atari-de.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2925 Jan 9 2025 usr/lib/kbd/keymaps/legacy/atari/atari-se.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2767 Jan 9 2025 usr/lib/kbd/keymaps/legacy/atari/atari-uk-falcon.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2985 Jan 9 2025 usr/lib/kbd/keymaps/legacy/atari/atari-us.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: drwxr-xr-x 13 root root 0 Aug 18 10:11 usr/lib/kbd/keymaps/legacy/i386
Jan 30 16:04:35 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 usr/lib/kbd/keymaps/legacy/i386/azerty
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 886 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/azerty/azerty.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 901 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/azerty/be-latin1.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 3 root root 0 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/azerty/fr-latin0.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 873 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/azerty/fr-latin1.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 3 root root 0 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/azerty/fr-latin9.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 842 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/azerty/fr-old.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 957 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/azerty/fr-pc.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 3 root root 5683 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/azerty/fr.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 343 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/azerty/wangbe.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1786 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/azerty/wangbe2.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 usr/lib/kbd/keymaps/legacy/i386/bepo
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 5617 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/bepo/fr-bepo-latin9.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1913 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/bepo/fr-bepo.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 usr/lib/kbd/keymaps/legacy/i386/carpalx
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 198 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/carpalx/carpalx-full.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 819 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/carpalx/carpalx.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 usr/lib/kbd/keymaps/legacy/i386/colemak
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1449 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/colemak/en-latin9.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 usr/lib/kbd/keymaps/legacy/i386/dvorak
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 145 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/dvorak/ANSI-dvorak.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1314 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/dvorak/dvorak-ca-fr.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1312 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/dvorak/dvorak-es.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1227 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/dvorak/dvorak-fr.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1205 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/dvorak/dvorak-l.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 904 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/dvorak/dvorak-la.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1045 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/dvorak/dvorak-no.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1098 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/dvorak/dvorak-programmer.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1202 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/dvorak/dvorak-r.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2598 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/dvorak/dvorak-ru.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1242 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/dvorak/dvorak-sv-a1.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1023 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/dvorak/dvorak-sv-a5.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 841 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/dvorak/dvorak-uk.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 890 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/dvorak/dvorak-ukp.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 782 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/dvorak/dvorak.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 usr/lib/kbd/keymaps/legacy/i386/fgGIod
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2258 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/fgGIod/tr_f-latin5.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1035 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/fgGIod/trf-fgGIod.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 usr/lib/kbd/keymaps/legacy/i386/include
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 230 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/include/applkey.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 438 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/include/azerty-layout.inc
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 65 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/include/backspace.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 278 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/include/compose.inc
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 126 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/include/ctrl.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 182 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/include/euro.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 253 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/include/euro1.inc
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 193 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/include/euro1.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 194 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/include/euro2.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 453 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/include/keypad.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 4353 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/include/linux-keys-bare.inc
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1148 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/include/linux-keys-extd.inc
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 746 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/include/linux-with-alt-and-altgr.inc
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3029 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/include/linux-with-modeshift-altgr.inc
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 146 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/include/linux-with-two-alt-keys.inc
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 456 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/include/qwerty-layout.inc
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 456 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/include/qwertz-layout.inc
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 426 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/include/unicode.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 116 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/include/windowkeys.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 usr/lib/kbd/keymaps/legacy/i386/neo
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 4702 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/neo/adnw.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 4694 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/neo/bone.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 4659 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/neo/koy.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 9439 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/neo/neo.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 4707 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/neo/neoqwertz.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 usr/lib/kbd/keymaps/legacy/i386/olpc
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 876 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/olpc/es-olpc.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 852 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/olpc/pt-olpc.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 usr/lib/kbd/keymaps/legacy/i386/qwerty
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2499 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/bashkir.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2981 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/bg-cp1251.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2328 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/bg-cp855.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 4627 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/bg_bds-cp1251.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 4622 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/bg_bds-utf8.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 4540 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/bg_pho-cp1251.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 4528 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/bg_pho-utf8.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2026 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/br-abnt.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1168 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/br-abnt2.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1106 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/br-latin1-abnt2.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 984 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/br-latin1-us.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2332 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/by-cp1251.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2854 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/by.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2330 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/bywin-cp1251.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1134 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/ca.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1174 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/cf.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2568 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/cz-cp1250.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2726 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/cz-lat2-prog.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2655 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/cz-lat2.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 8760 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/cz-qwerty.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2408 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/defkeymap.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2181 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/defkeymap_V1.0.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 962 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/dk-latin1.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 941 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/dk.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 987 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/emacs.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 775 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/emacs2.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1511 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/es-cp850.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1133 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/es.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 153 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/et-nodeadkeys.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1141 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/et.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1821 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/fa.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1463 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/fi.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2377 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/gr-pc.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3660 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/gr.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1313 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/hu101.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 12055 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/hypermap.m4
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1445 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/il-heb.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1360 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/il-phonetic.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2410 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/il.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1762 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/is-latin1-us.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1216 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/is-latin1.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1054 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/it-ibm.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1230 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/it.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2046 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/it2.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 779 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/jp106.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2495 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/kazakh.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 0 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/ko.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3387 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/ky_alt_sh-UTF-8.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2420 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/kyrgyz.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1058 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/la-latin1.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2313 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/lt.baltic.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 6095 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/lt.l4.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 6172 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/lt.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 542 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/lv-tilde.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1285 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/lv.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3147 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/mk-cp1251.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3201 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/mk-utf.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3164 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/mk.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1745 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/mk0.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1075 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/nl.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1220 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/nl2.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1714 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/no-latin1.doc
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1533 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/no-latin1.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 835 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/no.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 387 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/pc110.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1306 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/pl.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1234 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/pl1.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2247 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/pl2.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2327 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/pl3.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1165 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/pl4.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 989 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/pt-latin1.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 0 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/pt-latin9.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 930 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/pt.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1618 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/ro.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1435 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/ro_std.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2316 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/ru-cp1251.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3055 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/ru-ms.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3046 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/ru-yawerty.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3779 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/ru.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3499 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/ru1.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2591 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/ru2.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3383 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/ru3.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3378 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/ru4.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2571 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/ru_win.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3487 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/ruwin_alt-CP1251.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3350 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/ruwin_alt-KOI8-R.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3476 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/ruwin_alt-UTF-8.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3456 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/ruwin_alt_sh-UTF-8.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3373 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/ruwin_cplk-CP1251.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3236 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/ruwin_cplk-KOI8-R.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3363 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/ruwin_cplk-UTF-8.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3398 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/ruwin_ct_sh-CP1251.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3262 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/ruwin_ct_sh-KOI8-R.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3389 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/ruwin_ct_sh-UTF-8.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3344 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/ruwin_ctrl-CP1251.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3207 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/ruwin_ctrl-KOI8-R.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3334 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/ruwin_ctrl-UTF-8.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 177 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/se-fi-ir209.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 189 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/se-fi-lat6.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2432 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/se-ir209.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2540 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/se-lat6.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 0 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/se-latin1.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2614 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/sk-prog-qwerty.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1868 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/sk-qwerty.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 0 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/sr-cy.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 3307 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/sr-latin.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 1121 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/sv-latin1.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3490 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/tj_alt-UTF8.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1166 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/tr_q-latin5.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1132 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/tralt.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1578 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/trf.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1126 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/trq.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3374 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/ttwin_alt-UTF-8.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3384 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/ttwin_cplk-UTF-8.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3367 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/ttwin_ct_sh-UTF-8.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3348 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/ttwin_ctrl-UTF-8.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 4232 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/ua-cp1251.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 6958 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/ua-utf-ws.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 6866 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/ua-utf.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 6932 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/ua-ws.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 6853 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/ua.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 765 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/uk.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2549 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/us-acentos.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 666 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/us.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 117 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwerty/us1.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 usr/lib/kbd/keymaps/legacy/i386/qwertz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1286 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwertz/croat.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2539 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwertz/cz-us-qwertz.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 8706 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwertz/cz.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 270 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwertz/de-latin1-nodeadkeys.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 898 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwertz/de-latin1.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 347 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwertz/de-mobii.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 847 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwertz/de.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 294 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwertz/de_CH-latin1.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2377 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwertz/de_alt_UTF-8.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 973 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwertz/fr_CH-latin1.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 873 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwertz/fr_CH.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 1252 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwertz/hu.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 942 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwertz/sg-latin1-lk450.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 992 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwertz/sg-latin1.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 924 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwertz/sg.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3161 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwertz/sg.map.sg-decimal-separator
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2611 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwertz/sk-prog-qwertz.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2690 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwertz/sk-qwertz.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 0 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwertz/slovene.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 1191 Jan 9 2025 usr/lib/kbd/keymaps/legacy/i386/qwertz/sr-latin.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 usr/lib/kbd/keymaps/legacy/include
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 517 Jan 9 2025 usr/lib/kbd/keymaps/legacy/include/compose.8859_7
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 23 Jan 9 2025 usr/lib/kbd/keymaps/legacy/include/compose.8859_8
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 6030 Jan 9 2025 usr/lib/kbd/keymaps/legacy/include/compose.latin
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3889 Jan 9 2025 usr/lib/kbd/keymaps/legacy/include/compose.latin1
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 3465 Jan 9 2025 usr/lib/kbd/keymaps/legacy/include/compose.latin2
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2037 Jan 9 2025 usr/lib/kbd/keymaps/legacy/include/compose.latin3
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 2897 Jan 9 2025 usr/lib/kbd/keymaps/legacy/include/compose.latin4
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 1 root root 8402 Jan 9 2025 usr/lib/kbd/keymaps/legacy/include/vim-compose.latin1
Jan 30 16:04:35 managed-node1 dracut[62968]: drwxr-xr-x 4 root root 0 Aug 18 10:11 usr/lib/kbd/keymaps/legacy/mac
Jan 30 16:04:35 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 usr/lib/kbd/keymaps/legacy/mac/all
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 0 Jan 9 2025 usr/lib/kbd/keymaps/legacy/mac/all/apple-a1048-sv.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 0 Jan 9 2025 usr/lib/kbd/keymaps/legacy/mac/all/apple-a1243-sv-fn-reverse.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 0 Jan 9 2025 usr/lib/kbd/keymaps/legacy/mac/all/apple-a1243-sv.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 0 Jan 9 2025 usr/lib/kbd/keymaps/legacy/mac/all/apple-internal-0x0253-sv-fn-reverse.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 0 Jan 9 2025 usr/lib/kbd/keymaps/legacy/mac/all/apple-internal-0x0253-sv.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 0 Jan 9 2025 usr/lib/kbd/keymaps/legacy/mac/all/mac-be.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 0 Jan 9 2025 usr/lib/kbd/keymaps/legacy/mac/all/mac-de-latin1-nodeadkeys.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 0 Jan 9 2025 usr/lib/kbd/keymaps/legacy/mac/all/mac-de-latin1.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 0 Jan 9 2025 usr/lib/kbd/keymaps/legacy/mac/all/mac-de_CH.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 0 Jan 9 2025 usr/lib/kbd/keymaps/legacy/mac/all/mac-dk-latin1.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 0 Jan 9 2025 usr/lib/kbd/keymaps/legacy/mac/all/mac-dvorak.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 0 Jan 9 2025 usr/lib/kbd/keymaps/legacy/mac/all/mac-es.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 0 Jan 9 2025 usr/lib/kbd/keymaps/legacy/mac/all/mac-fi-latin1.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 0 Jan 9 2025 usr/lib/kbd/keymaps/legacy/mac/all/mac-fr.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 0 Jan 9 2025 usr/lib/kbd/keymaps/legacy/mac/all/mac-fr_CH-latin1.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 0 Jan 9 2025 usr/lib/kbd/keymaps/legacy/mac/all/mac-it.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 0 Jan 9 2025 usr/lib/kbd/keymaps/legacy/mac/all/mac-no-latin1.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 0 Jan 9 2025 usr/lib/kbd/keymaps/legacy/mac/all/mac-pl.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 0 Jan 9 2025 usr/lib/kbd/keymaps/legacy/mac/all/mac-pt-latin1.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 0 Jan 9 2025 usr/lib/kbd/keymaps/legacy/mac/all/mac-se.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 0 Jan 9 2025 usr/lib/kbd/keymaps/legacy/mac/all/mac-template.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 0 Jan 9 2025 usr/lib/kbd/keymaps/legacy/mac/all/mac-uk.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 0 Jan 9 2025 usr/lib/kbd/keymaps/legacy/mac/all/mac-us.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 usr/lib/kbd/keymaps/legacy/mac/include
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 0 Jan 9 2025 usr/lib/kbd/keymaps/legacy/mac/include/apple-a1048-base.inc
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 0 Jan 9 2025 usr/lib/kbd/keymaps/legacy/mac/include/apple-a1243-fn-reverse.inc
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 0 Jan 9 2025 usr/lib/kbd/keymaps/legacy/mac/include/apple-a1243-fn.inc
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 0 Jan 9 2025 usr/lib/kbd/keymaps/legacy/mac/include/mac-azerty-layout.inc
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 0 Jan 9 2025 usr/lib/kbd/keymaps/legacy/mac/include/mac-euro.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 0 Jan 9 2025 usr/lib/kbd/keymaps/legacy/mac/include/mac-euro2.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 0 Jan 9 2025 usr/lib/kbd/keymaps/legacy/mac/include/mac-linux-keys-bare.inc
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 0 Jan 9 2025 usr/lib/kbd/keymaps/legacy/mac/include/mac-qwerty-layout.inc
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 0 Jan 9 2025 usr/lib/kbd/keymaps/legacy/mac/include/mac-qwertz-layout.inc
Jan 30 16:04:35 managed-node1 dracut[62968]: drwxr-xr-x 4 root root 0 Aug 18 10:11 usr/lib/kbd/keymaps/legacy/ppc
Jan 30 16:04:35 managed-node1 dracut[62968]: drwxr-xr-x 2 root root 0 Aug 18 10:11 usr/lib/kbd/keymaps/legacy/ppc/all
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 1307 Jan 9 2025 usr/lib/kbd/keymaps/legacy/ppc/all/apple-a1048-sv.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 323 Jan 9 2025 usr/lib/kbd/keymaps/legacy/ppc/all/apple-a1243-sv-fn-reverse.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 406 Jan 9 2025 usr/lib/kbd/keymaps/legacy/ppc/all/apple-a1243-sv.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 352 Jan 9 2025 usr/lib/kbd/keymaps/legacy/ppc/all/apple-internal-0x0253-sv-fn-reverse.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 347 Jan 9 2025 usr/lib/kbd/keymaps/legacy/ppc/all/apple-internal-0x0253-sv.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 501 Jan 9 2025 usr/lib/kbd/keymaps/legacy/ppc/all/mac-be.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 288 Jan 9 2025 usr/lib/kbd/keymaps/legacy/ppc/all/mac-de-latin1-nodeadkeys.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 904 Jan 9 2025 usr/lib/kbd/keymaps/legacy/ppc/all/mac-de-latin1.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 767 Jan 9 2025 usr/lib/kbd/keymaps/legacy/ppc/all/mac-de_CH.map.gz
Jan 30 16:04:35 managed-node1 dracut[62968]: -rw-r--r-- 2 root root 639 Jan 9 2025 usr/lib/kbd/keymaps/legacy/ppc/all/mac-dk-latin1.map.gz