ansible-playbook [core 2.16.14]
  config file = None
  configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /usr/local/lib/python3.12/site-packages/ansible
  ansible collection location = /tmp/collections-wk5
  executable location = /usr/local/bin/ansible-playbook
  python version = 3.12.1 (main, Feb 21 2024, 14:18:26) [GCC 8.5.0 20210514 (Red Hat 8.5.0-21)] (/usr/bin/python3.12)
  jinja version = 3.1.6
  libyaml = True
No config file found; using defaults
running playbook inside collection fedora.linux_system_roles
statically imported: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/tests/metrics/get_services_state.yml
statically imported: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/tests/metrics/restore_services_state.yml
Skipping callback 'debug', as we already have a stdout callback.
Skipping callback 'json', as we already have a stdout callback.
Skipping callback 'jsonl', as we already have a stdout callback.
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.

PLAYBOOK: tests_verify_basic.yml ***********************************************
2 plays in /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_verify_basic.yml

PLAY [all] *********************************************************************

TASK [Include vault variables] *************************************************
task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_verify_basic.yml:5
Tuesday 24 June 2025 12:59:30 -0400 (0:00:00.017) 0:00:00.017 **********
ok: [managed-node3] => { "ansible_facts": { "pcptest_pw": { "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n65343431623161346664373330646165636437656265656632613961363839303132393064663934\n3137396633373562393466633037356533326566343338350a386238333034336162333932313162\n62643937336534356131376134303463306466316433366636643562633637376336653034646334\n3063663466333735390a333330366461386166633233373133326237323663333831653232646566\n3363\n" } }, "ansible_included_var_files": [ "/tmp/metrics-UYx/tests/vars/vault-variables.yml" ], "changed": false }

PLAY [Test basic services of the role are installed and running] ***************

TASK [Gathering Facts] *********************************************************
task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_verify_basic.yml:9
Tuesday 24 June 2025 12:59:30 -0400 (0:00:00.016) 0:00:00.034 **********
ok: [managed-node3]

TASK [Get initial state of services] *******************************************
task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/tests/metrics/get_services_state.yml:3
Tuesday 24 June 2025 12:59:31 -0400 (0:00:01.118) 0:00:01.153 **********
ok: [managed-node3] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running",
"status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "auto-cpufreq.service": { "name": "auto-cpufreq.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "avahi-daemon.service": { "name": "avahi-daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "chrony-dnssrv@.service": { "name": "chrony-dnssrv@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "static" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus-org.freedesktop.portable1.service": { "name": "dbus-org.freedesktop.portable1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "running", "status": "static" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, 
"dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "halt-local.service": { "name": "halt-local.service", "source": "systemd", "state": "inactive", "status": "static" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "import-state.service": { "name": "import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "iprdump.service": { "name": "iprdump.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprinit.service": { "name": "iprinit.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprupdate.service": { "name": "iprupdate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "irqbalance.service": { "name": "irqbalance.service", 
"source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "loadmodules.service": { "name": "loadmodules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "messagebus.service": { "name": "messagebus.service", "source": "systemd", "state": "active", "status": "static" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-convert.service": { "name": "nfs-convert.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-halt.service": { "name": "plymouth-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-kexec.service": { "name": "plymouth-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-poweroff.service": { "name": "plymouth-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-quit.service": { "name": "plymouth-quit.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-read-write.service": { "name": "plymouth-read-write.service", "source": 
"systemd", "state": "stopped", "status": "static" }, "plymouth-reboot.service": { "name": "plymouth-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-switch-root-initramfs.service": { "name": "plymouth-switch-root-initramfs.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-switch-root.service": { "name": "plymouth-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmcd.service": { "name": "pmcd.service", "source": "systemd", "state": "running", "status": "enabled" }, "pmfind.service": { "name": "pmfind.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pmie.service": { "name": "pmie.service", "source": "systemd", "state": "running", "status": "enabled" }, "pmie_check.service": { "name": "pmie_check.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmie_daily.service": { "name": "pmie_daily.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmie_farm.service": { "name": "pmie_farm.service", "source": "systemd", "state": "running", "status": "disabled" }, "pmie_farm_check.service": { "name": "pmie_farm_check.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmlogger.service": { "name": "pmlogger.service", "source": "systemd", "state": "running", "status": "enabled" }, "pmlogger_check.service": { "name": "pmlogger_check.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmlogger_daily.service": { "name": "pmlogger_daily.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmlogger_daily_report.service": { "name": "pmlogger_daily_report.service", "source": "systemd", "state": "inactive", "status": "static" }, "pmlogger_farm.service": { "name": "pmlogger_farm.service", "source": "systemd", "state": "running", "status": "disabled" }, "pmlogger_farm_check.service": { "name": "pmlogger_farm_check.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmproxy.service": { "name": "pmproxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "power-profiles-daemon.service": { "name": "power-profiles-daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", 
"status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "snapd.seeded.service": { "name": "snapd.seeded.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "active", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-plymouth.service": { "name": "systemd-ask-password-plymouth.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": 
"systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-portabled.service": { "name": "systemd-portabled.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, 
"systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-resolved.service": { "name": "systemd-resolved.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "masked" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "tcsd.service": { "name": "tcsd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "timedatex.service": { "name": "timedatex.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "tlp.service": { "name": "tlp.service", "source": "systemd", "state": "stopped", "status": 
"not-found" }, "tuned.service": { "name": "tuned.service", "source": "systemd", "state": "running", "status": "enabled" }, "unbound-anchor.service": { "name": "unbound-anchor.service", "source": "systemd", "state": "stopped", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "yppasswdd.service": { "name": "yppasswdd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypserv.service": { "name": "ypserv.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypxfrd.service": { "name": "ypxfrd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "zabbix-agent.service": { "name": "zabbix-agent.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [Run the role] ************************************************************ task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_verify_basic.yml:23 Tuesday 24 June 2025 12:59:32 -0400 (0:00:01.815) 0:00:02.968 ********** TASK [fedora.linux_system_roles.metrics : Ensure ansible_facts used by role] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:3 Tuesday 24 June 2025 12:59:33 -0400 (0:00:00.045) 0:00:03.014 ********** skipping: [managed-node3] => { "changed": false, "false_condition": "__metrics_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Run systemctl] *********************** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:11 Tuesday 24 June 2025 12:59:33 -0400 (0:00:00.043) 0:00:03.058 ********** ok: [managed-node3] => { "changed": false, "cmd": [ "systemctl", "is-system-running" ], "delta": "0:00:00.007297", "end": "2025-06-24 12:59:33.466528", "failed_when_result": false, "rc": 0, "start": "2025-06-24 12:59:33.459231" } STDOUT: running TASK [fedora.linux_system_roles.metrics : Require installed systemd] *********** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:18 Tuesday 24 June 2025 12:59:33 -0400 (0:00:00.457) 0:00:03.515 ********** skipping: [managed-node3] => { "changed": false, "false_condition": "\"No such file or directory\" in __is_system_running.msg | d(\"\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Set flag to indicate that systemd runtime operations are available] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:23 Tuesday 24 June 2025 12:59:33 -0400 (0:00:00.051) 0:00:03.567 ********** ok: [managed-node3] => { "ansible_facts": { "__metrics_is_booted": true }, "changed": false } TASK [fedora.linux_system_roles.metrics : Add Elasticsearch to metrics domain list] *** task path: 
/tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:28 Tuesday 24 June 2025 12:59:33 -0400 (0:00:00.025) 0:00:03.592 ********** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_from_elasticsearch | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Add SQL Server to metrics domain list] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:33 Tuesday 24 June 2025 12:59:33 -0400 (0:00:00.036) 0:00:03.629 ********** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_from_mssql | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Add Postfix to metrics domain list] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:38 Tuesday 24 June 2025 12:59:33 -0400 (0:00:00.034) 0:00:03.663 ********** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_from_postfix | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Add bpftrace to metrics domain list] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:43 Tuesday 24 June 2025 12:59:33 -0400 (0:00:00.048) 0:00:03.712 ********** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_from_bpftrace | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Setup metrics access for roles] ****** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:48 Tuesday 24 June 2025 12:59:33 -0400 (0:00:00.040) 0:00:03.753 ********** ok: [managed-node3] => { "ansible_facts": { "__metrics_accounts": [ { "saslpassword": "metrics", "sasluser": "metrics", "user": "metrics" } ] }, "changed": false } TASK [Configure Elasticsearch metrics] ***************************************** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:55 Tuesday 24 June 2025 12:59:33 -0400 (0:00:00.049) 0:00:03.803 ********** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_from_elasticsearch | d(false) | bool or metrics_into_elasticsearch | d(false) | bool\n", "skip_reason": "Conditional result was False" } TASK [Configure Spark metrics] ************************************************* task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:70 Tuesday 24 June 2025 12:59:33 -0400 (0:00:00.033) 0:00:03.837 ********** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_from_spark | d(false) | bool or metrics_into_spark | d(false) | bool\n", "skip_reason": "Conditional result was False" } TASK [Configure SQL Server metrics.] ******************************************* task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:82 Tuesday 24 June 2025 12:59:33 -0400 (0:00:00.036) 0:00:03.873 ********** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_from_mssql | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [Configure Postfix metrics.] 
********************************************** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:90 Tuesday 24 June 2025 12:59:33 -0400 (0:00:00.033) 0:00:03.907 ********** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_from_postfix | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [Setup bpftrace metrics.] ************************************************* task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:98 Tuesday 24 June 2025 12:59:33 -0400 (0:00:00.033) 0:00:03.941 ********** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_from_bpftrace | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [Setup metric querying service.] ****************************************** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:107 Tuesday 24 June 2025 12:59:33 -0400 (0:00:00.032) 0:00:03.973 ********** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_query_service | bool", "skip_reason": "Conditional result was False" } TASK [Setup metric collection service.] **************************************** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:113 Tuesday 24 June 2025 12:59:34 -0400 (0:00:00.031) 0:00:04.005 ********** TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Set platform/version specific variables] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/main.yml:4 Tuesday 24 June 2025 12:59:34 -0400 (0:00:00.052) 0:00:04.058 ********** ok: [managed-node3] => (item=/tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/default.yml) => { "ansible_facts": {}, "ansible_included_var_files": [ "/tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/default.yml" ], "ansible_loop_var": "item", "changed": false, "item": "/tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/default.yml" } ok: [managed-node3] => (item=/tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/RedHat.yml) => { "ansible_facts": { "__pcp_pmcd_defaults_path": "/etc/sysconfig/pmcd", "__pcp_pmlogger_defaults_path": "/etc/sysconfig/pmlogger", "__pcp_pmlogger_timers_path": "/etc/sysconfig/pmlogger_timers", "__pcp_pmproxy_defaults_path": "/etc/sysconfig/pmproxy" }, "ansible_included_var_files": [ "/tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "/tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/RedHat.yml" } skipping: [managed-node3] => (item=/tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "item is file", "item": "/tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node3] => 
(item=/tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/CentOS_8.yml) => { "ansible_facts": { "__pcp_packages_extra": [ "pcp-zeroconf" ], "__pcp_sasl_mechlist": "scram-sha-256", "__pcp_sasl_packages": [ "cyrus-sasl-lib", "cyrus-sasl-scram" ] }, "ansible_included_var_files": [ "/tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "/tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/CentOS_8.yml" } ok: [managed-node3] => (item=/tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/CentOS_8.yml) => { "ansible_facts": { "__pcp_packages_extra": [ "pcp-zeroconf" ], "__pcp_sasl_mechlist": "scram-sha-256", "__pcp_sasl_packages": [ "cyrus-sasl-lib", "cyrus-sasl-scram" ] }, "ansible_included_var_files": [ "/tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "/tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/vars/CentOS_8.yml" } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Check if system is ostree] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/main.yml:18 Tuesday 24 June 2025 12:59:34 -0400 (0:00:00.048) 0:00:04.107 ********** ok: [managed-node3] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Set flag to indicate system is ostree] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/main.yml:23 Tuesday 24 June 2025 12:59:34 -0400 (0:00:00.444) 0:00:04.552 ********** ok: [managed-node3] => { "ansible_facts": { "__ansible_pcp_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Run systemctl] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/main.yml:30 Tuesday 24 June 2025 12:59:34 -0400 (0:00:00.024) 0:00:04.577 ********** ok: [managed-node3] => { "changed": false, "cmd": [ "systemctl", "is-system-running" ], "delta": "0:00:00.007748", "end": "2025-06-24 12:59:34.881201", "failed_when_result": false, "rc": 0, "start": "2025-06-24 12:59:34.873453" } STDOUT: running TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Require installed systemd] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/main.yml:38 Tuesday 24 June 2025 12:59:34 -0400 (0:00:00.350) 0:00:04.927 ********** skipping: [managed-node3] => { "changed": false, "false_condition": "\"No such file or directory\" in __is_system_running.msg | d(\"\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Set flag to indicate that systemd runtime operations are available] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/main.yml:43 Tuesday 24 June 2025 12:59:34 -0400 (0:00:00.043) 0:00:04.970 ********** ok: [managed-node3] => { "ansible_facts": { "__pcp_is_booted": true }, "changed": false } TASK 
[fedora.linux_system_roles.private_metrics_subrole_pcp : Install Performance Co-Pilot packages] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/main.yml:47 Tuesday 24 June 2025 12:59:34 -0400 (0:00:00.023) 0:00:04.994 ********** ok: [managed-node3] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Install authentication packages] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/main.yml:53 Tuesday 24 June 2025 12:59:37 -0400 (0:00:02.989) 0:00:07.984 ********** ok: [managed-node3] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Include pmcd] **** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/main.yml:62 Tuesday 24 June 2025 12:59:40 -0400 (0:00:02.907) 0:00:10.891 ********** included: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml for managed-node3 TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : List optional metric collection agents to be enabled] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:4 Tuesday 24 June 2025 12:59:40 -0400 (0:00:00.064) 0:00:10.956 ********** skipping: [managed-node3] => { "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Extract metric collection configuration file content] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:9 Tuesday 24 June 2025 12:59:40 -0400 (0:00:00.040) 0:00:10.996 ********** ok: [managed-node3] => { "changed": false, "cmd": [ "cat", "/etc/pcp/pmcd/pmcd.conf" ], "delta": "0:00:01.003785", "end": "2025-06-24 12:59:42.299268", "rc": 0, "start": "2025-06-24 12:59:41.295483" } STDOUT: # # Name Id IPC IPC Params File/Cmd # Performance Metrics Domain Specifications # This file is automatically generated during the build root 1 pipe binary /var/lib/pcp/pmdas/root/pmdaroot pmcd 2 dso pmcd_init /var/lib/pcp/pmdas/pmcd/pmda_pmcd.so proc 3 pipe binary /var/lib/pcp/pmdas/proc/pmdaproc -d 3 pmproxy 4 dso pmproxy_init /var/lib/pcp/pmdas/mmv/pmda_mmv.so xfs 11 pipe binary /var/lib/pcp/pmdas/xfs/pmdaxfs -d 11 linux 60 pipe binary /var/lib/pcp/pmdas/linux/pmdalinux nfsclient 62 pipe binary python3 /var/lib/pcp/pmdas/nfsclient/pmdanfsclient.python mmv 70 dso mmv_init /var/lib/pcp/pmdas/mmv/pmda_mmv.so kvm 95 pipe binary /var/lib/pcp/pmdas/kvm/pmdakvm -d 95 jbd2 122 dso jbd2_init /var/lib/pcp/pmdas/jbd2/pmda_jbd2.so dm 129 pipe binary /var/lib/pcp/pmdas/dm/pmdadm -d 129 openmetrics 144 pipe binary notready python3 /var/lib/pcp/pmdas/openmetrics/pmdaopenmetrics.python [access] disallow ".*" : store; disallow ":*" : store; allow "local:*" : all; TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure optional metric collection agents are enabled] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:14 Tuesday 24 June 2025 12:59:42 -0400 (0:00:01.352) 0:00:12.348 ********** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK 
[fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure explicit metric label path exists] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:23 Tuesday 24 June 2025 12:59:42 -0400 (0:00:00.037) 0:00:12.386 ********** ok: [managed-node3] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/pcp/labels", "secontext": "system_u:object_r:etc_t:s0", "size": 45, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure implicit metric label path exists] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:31 Tuesday 24 June 2025 12:59:42 -0400 (0:00:00.496) 0:00:12.883 ********** ok: [managed-node3] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/pcp/labels/optional", "secontext": "system_u:object_r:etc_t:s0", "size": 29, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure any explicit metric labels are configured] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:39 Tuesday 24 June 2025 12:59:43 -0400 (0:00:00.368) 0:00:13.251 ********** ok: [managed-node3] => { "changed": false, "checksum": "5f36b2ea290645ee34d943220a14b54ee5ea5be5", "dest": "/etc/pcp/labels/ansible-managed", "gid": 0, "group": "root", "mode": "0644", "owner": "root", "path": "/etc/pcp/labels/ansible-managed", "secontext": "system_u:object_r:etc_t:s0", "size": 3, "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure any implicit metric labels are configured] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:46 Tuesday 24 June 2025 12:59:43 -0400 (0:00:00.658) 0:00:13.909 ********** ok: [managed-node3] => { "changed": false, "checksum": "5f36b2ea290645ee34d943220a14b54ee5ea5be5", "dest": "/etc/pcp/labels/optional/ansible-managed", "gid": 0, "group": "root", "mode": "0644", "owner": "root", "path": "/etc/pcp/labels/optional/ansible-managed", "secontext": "system_u:object_r:etc_t:s0", "size": 3, "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric collector is configured] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:53 Tuesday 24 June 2025 12:59:44 -0400 (0:00:00.659) 0:00:14.568 ********** ok: [managed-node3] => { "changed": false, "checksum": "7518789c091387cd9c322e1a8fa8aad21d4efbd3", "dest": "/etc/sysconfig/pmcd", "gid": 0, "group": "root", "mode": "0644", "owner": "root", "path": "/etc/sysconfig/pmcd", "secontext": "system_u:object_r:etc_t:s0", "size": 1627, "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric collector system accounts are configured] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:60 Tuesday 24 June 2025 12:59:45 -0400 (0:00:00.634) 0:00:15.203 ********** ok: [managed-node3] => (item={'user': 'metrics', 'sasluser': 'metrics', 'saslpassword': 'metrics'}) => { "ansible_loop_var": "item", "append": false, "changed": false, "comment": "", "group": 990, "home": 
"/home/metrics", "item": { "saslpassword": "metrics", "sasluser": "metrics", "user": "metrics" }, "move_home": false, "name": "metrics", "shell": "/bin/bash", "state": "present", "uid": 993 } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric collector SASL accounts are configured] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:68 Tuesday 24 June 2025 12:59:45 -0400 (0:00:00.536) 0:00:15.739 ********** ok: [managed-node3] => (item={'user': 'metrics', 'sasluser': 'metrics', 'saslpassword': 'metrics'}) => { "ansible_loop_var": "item", "changed": false, "cmd": "set -eu\nif set -o | grep -q pipefail; then\n set -o pipefail # pipefail not supported on debian, some ubuntu\nfi\nif ! sasldblistusers2 -f \"/etc/pcp/passwd.db\" | grep -q \"^metrics@\"; then\n echo \"Creating new metrics user in /etc/pcp/passwd.db\"\n echo \"metrics\" | saslpasswd2 -a pmcd \"metrics\"\n chown root:pcp \"/etc/pcp/passwd.db\"\n chmod 640 \"/etc/pcp/passwd.db\"\nfi\n", "delta": "0:00:00.007651", "end": "2025-06-24 12:59:46.058899", "item": { "saslpassword": "metrics", "sasluser": "metrics", "user": "metrics" }, "rc": 0, "start": "2025-06-24 12:59:46.051248" } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric collector authentication is configured] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:86 Tuesday 24 June 2025 12:59:46 -0400 (0:00:00.368) 0:00:16.108 ********** ok: [managed-node3] => { "changed": false, "checksum": "615d2de55ab86108da0c7e6b64988fecb4169771", "dest": "/etc/sasl2/pmcd.conf", "gid": 0, "group": "root", "mode": "0644", "owner": "root", "path": "/etc/sasl2/pmcd.conf", "secontext": "system_u:object_r:etc_t:s0", "size": 998, "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Set variable to do pmcd restart if needed] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:94 Tuesday 24 June 2025 12:59:46 -0400 (0:00:00.654) 0:00:16.763 ********** ok: [managed-node3] => { "ansible_facts": { "__pcp_restart_pmcd": false }, "changed": false } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Report performance metric collector restart state] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:99 Tuesday 24 June 2025 12:59:46 -0400 (0:00:00.037) 0:00:16.801 ********** ok: [managed-node3] => {} MSG: ['optional_agents: False', 'explicit_labels: False', 'implicit_labels: False', 'defaults_config: False', 'authentication: False', 'restart_pmcd: False'] TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric collector is running and enabled on boot] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:110 Tuesday 24 June 2025 12:59:46 -0400 (0:00:00.052) 0:00:16.853 ********** ok: [managed-node3] => { "changed": false, "enabled": true, "name": "pmcd", "state": "started", "status": { "ActiveEnterTimestamp": "Tue 2025-06-24 12:59:09 EDT", "ActiveEnterTimestampMonotonic": "389230875", "ActiveExitTimestamp": "Tue 2025-06-24 12:59:08 EDT", "ActiveExitTimestampMonotonic": "388150858", "ActiveState": "active", "After": "basic.target system.slice avahi-daemon.service 
systemd-journald.socket sysinit.target network-online.target", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "yes", "AssertTimestamp": "Tue 2025-06-24 12:59:08 EDT", "AssertTimestampMonotonic": "388917276", "Before": "zabbix-agent.service pmlogger.service multi-user.target pmie.service shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Tue 2025-06-24 12:59:08 EDT", "ConditionTimestampMonotonic": "388917274", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/system.slice/pmcd.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Performance Metrics Collector Daemon", "DevicePolicy": "auto", "Documentation": "man:pmcd(1)", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "17163", "ExecMainStartTimestamp": "Tue 2025-06-24 12:59:09 EDT", "ExecMainStartTimestampMonotonic": "389230851", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/libexec/pcp/lib/pmcd ; argv[]=/usr/libexec/pcp/lib/pmcd start-systemd ; ignore_errors=no ; start_time=[Tue 2025-06-24 12:59:08 EDT] ; stop_time=[n/a] ; pid=17067 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/libexec/pcp/lib/pmcd ; argv[]=/usr/libexec/pcp/lib/pmcd stop-systemd ; ignore_errors=no ; start_time=[Tue 2025-06-24 12:59:08 EDT] ; stop_time=[Tue 2025-06-24 12:59:08 EDT] ; pid=16702 ; code=exited ; status=0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/pmcd.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "pmcd.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestamp": "Tue 2025-06-24 12:59:08 EDT", "InactiveEnterTimestampMonotonic": "388916458", "InactiveExitTimestamp": "Tue 2025-06-24 12:59:08 EDT", "InactiveExitTimestampMonotonic": "388918121", "InvocationID": "a8f7e69db5ca42d7815ce0ec09a1c3e5", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": 
"none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14004", "LimitNPROCSoft": "14004", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14004", "LimitSIGPENDINGSoft": "14004", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "17163", "MemoryAccounting": "yes", "MemoryCurrent": "37498880", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "pmcd.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PIDFile": "/run/pcp/pmcd.pid", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice sysinit.target", "Restart": "always", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Tue 2025-06-24 12:59:09 EDT", "StateChangeTimestampMonotonic": "389230875", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "9", "TasksMax": "22406", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "enabled", 
"UtmpMode": "init", "WantedBy": "multi-user.target pmie.service pmlogger.service", "WatchdogTimestamp": "Tue 2025-06-24 12:59:09 EDT", "WatchdogTimestampMonotonic": "389230871", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric collector is restarted and enabled on boot] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:117 Tuesday 24 June 2025 12:59:47 -0400 (0:00:00.764) 0:00:17.617 ********** skipping: [managed-node3] => { "changed": false, "false_condition": "__pcp_restart_pmcd | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Include pmie] **** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/main.yml:65 Tuesday 24 June 2025 12:59:47 -0400 (0:00:00.037) 0:00:17.654 ********** included: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml for managed-node3 TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure extra performance rule group directories exist] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:4 Tuesday 24 June 2025 12:59:47 -0400 (0:00:00.044) 0:00:17.699 ********** ok: [managed-node3] => (item=network) => { "ansible_loop_var": "item", "changed": false, "gid": 0, "group": "root", "item": "network", "mode": "0755", "owner": "root", "path": "/etc/pcp/pmieconf/network", "secontext": "system_u:object_r:etc_t:s0", "size": 78, "state": "directory", "uid": 0 } ok: [managed-node3] => (item=power) => { "ansible_loop_var": "item", "changed": false, "gid": 0, "group": "root", "item": "power", "mode": "0755", "owner": "root", "path": "/etc/pcp/pmieconf/power", "secontext": "system_u:object_r:etc_t:s0", "size": 30, "state": "directory", "uid": 0 } ok: [managed-node3] => (item=zeroconf) => { "ansible_loop_var": "item", "changed": false, "gid": 0, "group": "root", "item": "zeroconf", "mode": "0755", "owner": "root", "path": "/etc/pcp/pmieconf/zeroconf", "secontext": "system_u:object_r:etc_t:s0", "size": 25, "state": "directory", "uid": 0 } ok: [managed-node3] => (item=filesys) => { "ansible_loop_var": "item", "changed": false, "gid": 0, "group": "root", "item": "filesys", "mode": "0755", "owner": "root", "path": "/etc/pcp/pmieconf/filesys", "secontext": "system_u:object_r:etc_t:s0", "size": 38, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure extra performance rule group link directories exist] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:14 Tuesday 24 June 2025 12:59:49 -0400 (0:00:01.391) 0:00:19.090 ********** ok: [managed-node3] => (item=network) => { "ansible_loop_var": "item", "changed": false, "gid": 0, "group": "root", "item": "network", "mode": "0755", "owner": "root", "path": "/var/lib/pcp/config/pmieconf/network", "secontext": "system_u:object_r:pcp_var_lib_t:s0", "size": 78, "state": "directory", "uid": 0 } ok: [managed-node3] => (item=power) => { "ansible_loop_var": "item", "changed": false, "gid": 0, "group": "root", "item": "power", "mode": "0755", "owner": "root", "path": "/var/lib/pcp/config/pmieconf/power", "secontext": "system_u:object_r:pcp_var_lib_t:s0", "size": 30, "state": "directory", "uid": 0 } 
ok: [managed-node3] => (item=zeroconf) => { "ansible_loop_var": "item", "changed": false, "gid": 0, "group": "root", "item": "zeroconf", "mode": "0755", "owner": "root", "path": "/var/lib/pcp/config/pmieconf/zeroconf", "secontext": "system_u:object_r:pcp_var_lib_t:s0", "size": 25, "state": "directory", "uid": 0 } ok: [managed-node3] => (item=filesys) => { "ansible_loop_var": "item", "changed": false, "gid": 0, "group": "root", "item": "filesys", "mode": "0755", "owner": "root", "path": "/var/lib/pcp/config/pmieconf/filesys", "secontext": "system_u:object_r:pcp_var_lib_t:s0", "size": 38, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure extra performance rules are installed for targeted hosts] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:24 Tuesday 24 June 2025 12:59:50 -0400 (0:00:01.422) 0:00:20.513 ********** ok: [managed-node3] => (item=network/tcplistenoverflows) => { "ansible_loop_var": "item", "changed": false, "checksum": "608d8a6ac6ee33bb86b77d28ba24fbcd378db43d", "dest": "/etc/pcp/pmieconf/network/tcplistenoverflows", "gid": 0, "group": "root", "item": "network/tcplistenoverflows", "mode": "0644", "owner": "root", "path": "/etc/pcp/pmieconf/network/tcplistenoverflows", "secontext": "system_u:object_r:etc_t:s0", "size": 971, "state": "file", "uid": 0 } ok: [managed-node3] => (item=network/tcpqfulldocookies) => { "ansible_loop_var": "item", "changed": false, "checksum": "3256a5c2e8d07a20d8e97a08c0ab163252b0beae", "dest": "/etc/pcp/pmieconf/network/tcpqfulldocookies", "gid": 0, "group": "root", "item": "network/tcpqfulldocookies", "mode": "0644", "owner": "root", "path": "/etc/pcp/pmieconf/network/tcpqfulldocookies", "secontext": "system_u:object_r:etc_t:s0", "size": 1131, "state": "file", "uid": 0 } ok: [managed-node3] => (item=network/tcpqfulldrops) => { "ansible_loop_var": "item", "changed": false, "checksum": "37b2bd7f2430bd9678ab078c5e69a53bea556524", "dest": "/etc/pcp/pmieconf/network/tcpqfulldrops", "gid": 0, "group": "root", "item": "network/tcpqfulldrops", "mode": "0644", "owner": "root", "path": "/etc/pcp/pmieconf/network/tcpqfulldrops", "secontext": "system_u:object_r:etc_t:s0", "size": 1129, "state": "file", "uid": 0 } ok: [managed-node3] => (item=power/thermal_throttle) => { "ansible_loop_var": "item", "changed": false, "checksum": "1d53d6182709617c8f633339652d8d9e75f3b603", "dest": "/etc/pcp/pmieconf/power/thermal_throttle", "gid": 0, "group": "root", "item": "power/thermal_throttle", "mode": "0644", "owner": "root", "path": "/etc/pcp/pmieconf/power/thermal_throttle", "secontext": "system_u:object_r:etc_t:s0", "size": 1153, "state": "file", "uid": 0 } ok: [managed-node3] => (item=zeroconf/all_threads) => { "ansible_loop_var": "item", "changed": false, "checksum": "65169db16dcaa224c211373001adc3addf1031c4", "dest": "/etc/pcp/pmieconf/zeroconf/all_threads", "gid": 0, "group": "root", "item": "zeroconf/all_threads", "mode": "0644", "owner": "root", "path": "/etc/pcp/pmieconf/zeroconf/all_threads", "secontext": "system_u:object_r:etc_t:s0", "size": 840, "state": "file", "uid": 0 } ok: [managed-node3] => (item=filesys/vfs_files) => { "ansible_loop_var": "item", "changed": false, "checksum": "cd5d85dfb8eebd7d9737d56e78bd969dafa3999c", "dest": "/etc/pcp/pmieconf/filesys/vfs_files", "gid": 0, "group": "root", "item": "filesys/vfs_files", "mode": "0644", "owner": "root", "path": "/etc/pcp/pmieconf/filesys/vfs_files", "secontext": 
"system_u:object_r:etc_t:s0", "size": 969, "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance rule actions are installed for targeted hosts] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:34 Tuesday 24 June 2025 12:59:54 -0400 (0:00:03.641) 0:00:24.155 ********** ok: [managed-node3] => { "ansible_facts": { "local_pmie": "default" }, "changed": false } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Check if global pmie webhook action is configured] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:38 Tuesday 24 June 2025 12:59:54 -0400 (0:00:00.022) 0:00:24.178 ********** skipping: [managed-node3] => (item=default) => { "ansible_loop_var": "item", "changed": false, "false_condition": "pcp_pmie_endpoint | length > 0", "item": "default", "skip_reason": "Conditional result was False" } skipping: [managed-node3] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Configure global webhook action] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:50 Tuesday 24 June 2025 12:59:54 -0400 (0:00:00.039) 0:00:24.217 ********** skipping: [managed-node3] => (item={'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', 'false_condition': 'pcp_pmie_endpoint | length > 0', 'item': 'default', 'ansible_loop_var': 'item'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "pcp_pmie_endpoint | length > 0", "item": { "ansible_loop_var": "item", "changed": false, "false_condition": "pcp_pmie_endpoint | length > 0", "item": "default", "skip_reason": "Conditional result was False", "skipped": true }, "skip_reason": "Conditional result was False" } skipping: [managed-node3] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Check if global webhook endpoint is configured] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:63 Tuesday 24 June 2025 12:59:54 -0400 (0:00:00.044) 0:00:24.262 ********** ok: [managed-node3] => (item=default) => { "ansible_loop_var": "item", "backup": "", "changed": false, "found": 0, "item": "default" } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Configure global webhook endpoint] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:73 Tuesday 24 June 2025 12:59:54 -0400 (0:00:00.483) 0:00:24.745 ********** skipping: [managed-node3] => (item={'changed': False, 'found': 0, 'msg': '', 'backup': '', 'diff': [{'before': '', 'after': '', 'before_header': '/var/lib/pcp/config/pmie/config.default (content)', 'after_header': '/var/lib/pcp/config/pmie/config.default (content)'}, {'before_header': '/var/lib/pcp/config/pmie/config.default (file attributes)', 'after_header': '/var/lib/pcp/config/pmie/config.default (file attributes)'}], 'invocation': {'module_args': {'state': 'absent', 'path': '/var/lib/pcp/config/pmie/config.default', 'regexp': '//.*global webhook_endpoint = ""', 'backrefs': False, 'create': False, 'backup': False, 'firstmatch': False, 'unsafe_writes': False, 'search_string': None, 'line': None, 'insertafter': None, 'insertbefore': None, 
'validate': None, 'mode': None, 'owner': None, 'group': None, 'seuser': None, 'serole': None, 'selevel': None, 'setype': None, 'attributes': None}}, 'failed': False, 'item': 'default', 'ansible_loop_var': 'item'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "pcp_pmie_endpoint | length > 0", "item": { "ansible_loop_var": "item", "backup": "", "changed": false, "diff": [ { "after": "", "after_header": "/var/lib/pcp/config/pmie/config.default (content)", "before": "", "before_header": "/var/lib/pcp/config/pmie/config.default (content)" }, { "after_header": "/var/lib/pcp/config/pmie/config.default (file attributes)", "before_header": "/var/lib/pcp/config/pmie/config.default (file attributes)" } ], "failed": false, "found": 0, "invocation": { "module_args": { "attributes": null, "backrefs": false, "backup": false, "create": false, "firstmatch": false, "group": null, "insertafter": null, "insertbefore": null, "line": null, "mode": null, "owner": null, "path": "/var/lib/pcp/config/pmie/config.default", "regexp": "//.*global webhook_endpoint = \"\"", "search_string": null, "selevel": null, "serole": null, "setype": null, "seuser": null, "state": "absent", "unsafe_writes": false, "validate": null } }, "item": "default", "msg": "" }, "skip_reason": "Conditional result was False" } skipping: [managed-node3] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure extra rules symlinks have been created for targeted hosts] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:86 Tuesday 24 June 2025 12:59:54 -0400 (0:00:00.055) 0:00:24.801 ********** ok: [managed-node3] => (item=network/tcplistenoverflows) => { "ansible_loop_var": "item", "changed": false, "dest": "/var/lib/pcp/config/pmieconf/network/tcplistenoverflows", "gid": 0, "group": "root", "item": "network/tcplistenoverflows", "mode": "0777", "owner": "root", "secontext": "unconfined_u:object_r:pcp_var_lib_t:s0", "size": 44, "src": "/etc/pcp/pmieconf/network/tcplistenoverflows", "state": "link", "uid": 0 } ok: [managed-node3] => (item=network/tcpqfulldocookies) => { "ansible_loop_var": "item", "changed": false, "dest": "/var/lib/pcp/config/pmieconf/network/tcpqfulldocookies", "gid": 0, "group": "root", "item": "network/tcpqfulldocookies", "mode": "0777", "owner": "root", "secontext": "unconfined_u:object_r:pcp_var_lib_t:s0", "size": 43, "src": "/etc/pcp/pmieconf/network/tcpqfulldocookies", "state": "link", "uid": 0 } ok: [managed-node3] => (item=network/tcpqfulldrops) => { "ansible_loop_var": "item", "changed": false, "dest": "/var/lib/pcp/config/pmieconf/network/tcpqfulldrops", "gid": 0, "group": "root", "item": "network/tcpqfulldrops", "mode": "0777", "owner": "root", "secontext": "unconfined_u:object_r:pcp_var_lib_t:s0", "size": 39, "src": "/etc/pcp/pmieconf/network/tcpqfulldrops", "state": "link", "uid": 0 } ok: [managed-node3] => (item=power/thermal_throttle) => { "ansible_loop_var": "item", "changed": false, "dest": "/var/lib/pcp/config/pmieconf/power/thermal_throttle", "gid": 0, "group": "root", "item": "power/thermal_throttle", "mode": "0777", "owner": "root", "secontext": "unconfined_u:object_r:pcp_var_lib_t:s0", "size": 40, "src": "/etc/pcp/pmieconf/power/thermal_throttle", "state": "link", "uid": 0 } ok: [managed-node3] => (item=zeroconf/all_threads) => { "ansible_loop_var": "item", "changed": false, "dest": "/var/lib/pcp/config/pmieconf/zeroconf/all_threads", "gid": 0, 
"group": "root", "item": "zeroconf/all_threads", "mode": "0777", "owner": "root", "secontext": "unconfined_u:object_r:pcp_var_lib_t:s0", "size": 38, "src": "/etc/pcp/pmieconf/zeroconf/all_threads", "state": "link", "uid": 0 } ok: [managed-node3] => (item=filesys/vfs_files) => { "ansible_loop_var": "item", "changed": false, "dest": "/var/lib/pcp/config/pmieconf/filesys/vfs_files", "gid": 0, "group": "root", "item": "filesys/vfs_files", "mode": "0777", "owner": "root", "secontext": "unconfined_u:object_r:pcp_var_lib_t:s0", "size": 35, "src": "/etc/pcp/pmieconf/filesys/vfs_files", "state": "link", "uid": 0 } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Enable performance metric inference for targeted hosts (with control.d)] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:95 Tuesday 24 June 2025 12:59:57 -0400 (0:00:02.240) 0:00:27.041 ********** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Enable performance metric inference for targeted hosts (single control)] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:106 Tuesday 24 June 2025 12:59:57 -0400 (0:00:00.038) 0:00:27.080 ********** skipping: [managed-node3] => { "changed": false, "false_condition": "pcp_single_control | d(true) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Set variable to do pmie restart if needed] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:116 Tuesday 24 June 2025 12:59:57 -0400 (0:00:00.040) 0:00:27.121 ********** ok: [managed-node3] => { "ansible_facts": { "__pcp_restart_pmie": false }, "changed": false } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric inference is running and enabled on boot] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:120 Tuesday 24 June 2025 12:59:57 -0400 (0:00:00.026) 0:00:27.147 ********** ok: [managed-node3] => { "changed": false, "enabled": true, "name": "pmie", "state": "started", "status": { "ActiveEnterTimestamp": "Tue 2025-06-24 12:59:20 EDT", "ActiveEnterTimestampMonotonic": "400548123", "ActiveExitTimestamp": "Tue 2025-06-24 12:59:20 EDT", "ActiveExitTimestampMonotonic": "400082447", "ActiveState": "active", "After": "systemd-journald.socket network-online.target basic.target system.slice sysinit.target pmcd.service", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "yes", "AssertTimestamp": "Tue 2025-06-24 12:59:20 EDT", "AssertTimestampMonotonic": "400318242", "Before": "multi-user.target pmie_farm.service shutdown.target pmie_daily.timer pmie_check.timer", "BindsTo": "pmie_farm.service pmie_daily.timer pmie_check.timer", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", 
"CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Tue 2025-06-24 12:59:20 EDT", "ConditionTimestampMonotonic": "400318240", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ConsistsOf": "pmie_farm.service", "ControlGroup": "/system.slice/pmie.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Performance Metrics Inference Engine", "DevicePolicy": "auto", "Documentation": "man:pmie(1)", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "Environment": "PMIE_CHECK_PARAMS=--only-primary", "EnvironmentFiles": "/etc/sysconfig/pmie (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "21281", "ExecMainStartTimestamp": "Tue 2025-06-24 12:59:20 EDT", "ExecMainStartTimestampMonotonic": "400548101", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/libexec/pcp/lib/pmie ; argv[]=/usr/libexec/pcp/lib/pmie start-systemd ; ignore_errors=no ; start_time=[Tue 2025-06-24 12:59:20 EDT] ; stop_time=[n/a] ; pid=21064 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/libexec/pcp/lib/pmie ; argv[]=/usr/libexec/pcp/lib/pmie stop-systemd ; ignore_errors=no ; start_time=[Tue 2025-06-24 12:59:20 EDT] ; stop_time=[Tue 2025-06-24 12:59:20 EDT] ; pid=20852 ; code=exited ; status=0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/pmie.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "pmie.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestamp": "Tue 2025-06-24 12:59:20 EDT", "InactiveEnterTimestampMonotonic": "400317248", "InactiveExitTimestamp": "Tue 2025-06-24 12:59:20 EDT", "InactiveExitTimestampMonotonic": "400319537", "InvocationID": "55b7c6a0971a48c98a1491eb4e8d7872", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14004", 
"LimitNPROCSoft": "14004", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14004", "LimitSIGPENDINGSoft": "14004", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "21281", "MemoryAccounting": "yes", "MemoryCurrent": "1605632", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "pmie.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PIDFile": "/run/pcp/pmie.pid", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice sysinit.target", "Restart": "always", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Tue 2025-06-24 12:59:20 EDT", "StateChangeTimestampMonotonic": "400548123", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "1", "TasksMax": "22406", "TimeoutStartUSec": "2min", "TimeoutStopUSec": "2min", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "pmcd.service", "WatchdogTimestamp": "Tue 2025-06-24 12:59:20 EDT", "WatchdogTimestampMonotonic": "400548120", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric inference is restarted and enabled on boot] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:127 Tuesday 24 June 2025 12:59:57 -0400 (0:00:00.532) 0:00:27.680 ********** skipping: [managed-node3] => { "changed": false, "false_condition": "__pcp_restart_pmie | bool", "skip_reason": "Conditional 
result was False" } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Include pmlogger] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/main.yml:68 Tuesday 24 June 2025 12:59:57 -0400 (0:00:00.035) 0:00:27.715 ********** included: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmlogger.yml for managed-node3 TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure metric log location is configured] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmlogger.yml:4 Tuesday 24 June 2025 12:59:57 -0400 (0:00:00.046) 0:00:27.762 ********** ok: [managed-node3] => { "backup": "", "changed": false } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric logging is configured] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmlogger.yml:12 Tuesday 24 June 2025 12:59:58 -0400 (0:00:00.347) 0:00:28.109 ********** ok: [managed-node3] => { "changed": false, "checksum": "67bc35973101c614e92b1990f8bebfffc39fe498", "dest": "/etc/sysconfig/pmlogger", "gid": 0, "group": "root", "mode": "0644", "owner": "root", "path": "/etc/sysconfig/pmlogger", "secontext": "system_u:object_r:etc_t:s0", "size": 1180, "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric logging retention period is set] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmlogger.yml:19 Tuesday 24 June 2025 12:59:58 -0400 (0:00:00.668) 0:00:28.777 ********** ok: [managed-node3] => { "changed": false, "checksum": "df7bd3b5b6f1de3af164aab81441c7251a13a298", "dest": "/etc/sysconfig/pmlogger_timers", "gid": 0, "group": "root", "mode": "0644", "owner": "root", "path": "/etc/sysconfig/pmlogger_timers", "secontext": "system_u:object_r:etc_t:s0", "size": 988, "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Enable performance metric logging for targeted hosts (with control.d)] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmlogger.yml:27 Tuesday 24 June 2025 12:59:59 -0400 (0:00:00.649) 0:00:29.427 ********** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Enable performance metric logging for targeted hosts (single control)] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmlogger.yml:39 Tuesday 24 June 2025 12:59:59 -0400 (0:00:00.039) 0:00:29.466 ********** skipping: [managed-node3] => { "changed": false, "false_condition": "pcp_single_control | d(true) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Set variable to do pmlogger restart if needed] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmlogger.yml:49 Tuesday 24 June 2025 12:59:59 -0400 (0:00:00.037) 0:00:29.503 ********** ok: [managed-node3] => { "ansible_facts": { "__pcp_restart_pmlogger": false }, "changed": false } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure 
performance metric logging is running and enabled on boot] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmlogger.yml:53 Tuesday 24 June 2025 12:59:59 -0400 (0:00:00.025) 0:00:29.529 ********** ok: [managed-node3] => { "changed": false, "enabled": true, "name": "pmlogger", "state": "started", "status": { "ActiveEnterTimestamp": "Tue 2025-06-24 12:59:25 EDT", "ActiveEnterTimestampMonotonic": "405552152", "ActiveExitTimestamp": "Tue 2025-06-24 12:59:24 EDT", "ActiveExitTimestampMonotonic": "404895614", "ActiveState": "active", "After": "basic.target pmcd.service system.slice network-online.target sysinit.target systemd-journald.socket", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "yes", "AssertTimestamp": "Tue 2025-06-24 12:59:24 EDT", "AssertTimestampMonotonic": "404968961", "Before": "pmlogger_check.timer pmlogger_daily.timer shutdown.target multi-user.target pmlogger_farm.service", "BindsTo": "pmlogger_check.timer pmlogger_daily.timer pmlogger_farm.service", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Tue 2025-06-24 12:59:24 EDT", "ConditionTimestampMonotonic": "404968960", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ConsistsOf": "pmlogger_farm.service", "ControlGroup": "/system.slice/pmlogger.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Performance Metrics Archive Logger", "DevicePolicy": "auto", "Documentation": "man:pmlogger(1)", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "Environment": "PMLOGGER_CHECK_PARAMS=--only-primary", "EnvironmentFiles": "/etc/sysconfig/pmlogger (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "23035", "ExecMainStartTimestamp": "Tue 2025-06-24 12:59:25 EDT", "ExecMainStartTimestampMonotonic": "405552125", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/libexec/pcp/lib/pmlogger ; argv[]=/usr/libexec/pcp/lib/pmlogger start-systemd ; ignore_errors=no ; start_time=[Tue 2025-06-24 12:59:24 EDT] ; stop_time=[n/a] ; pid=22811 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/libexec/pcp/lib/pmlogger ; argv[]=/usr/libexec/pcp/lib/pmlogger stop-systemd ; ignore_errors=no ; start_time=[Tue 2025-06-24 12:59:24 EDT] ; stop_time=[Tue 
2025-06-24 12:59:24 EDT] ; pid=22753 ; code=exited ; status=0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/pmlogger.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "pmlogger.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestamp": "Tue 2025-06-24 12:59:24 EDT", "InactiveEnterTimestampMonotonic": "404968092", "InactiveExitTimestamp": "Tue 2025-06-24 12:59:24 EDT", "InactiveExitTimestampMonotonic": "404970097", "InvocationID": "05ac186e5255420a83037767a5665f85", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14004", "LimitNPROCSoft": "14004", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14004", "LimitSIGPENDINGSoft": "14004", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "23035", "MemoryAccounting": "yes", "MemoryCurrent": "2965504", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "pmlogger.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PIDFile": "/run/pcp/pmlogger.pid", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice sysinit.target", "Restart": "always", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", 
"StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Tue 2025-06-24 12:59:25 EDT", "StateChangeTimestampMonotonic": "405552152", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "1", "TasksMax": "22406", "TimeoutStartUSec": "2min", "TimeoutStopUSec": "2min", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "pmcd.service", "WatchdogTimestamp": "Tue 2025-06-24 12:59:25 EDT", "WatchdogTimestampMonotonic": "405552149", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric logging is restarted and enabled on boot] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmlogger.yml:60 Tuesday 24 June 2025 13:00:00 -0400 (0:00:00.515) 0:00:30.044 ********** skipping: [managed-node3] => { "changed": false, "false_condition": "__pcp_restart_pmlogger | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.private_metrics_subrole_pcp : Include pmproxy] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/main.yml:71 Tuesday 24 June 2025 13:00:00 -0400 (0:00:00.040) 0:00:30.084 ********** skipping: [managed-node3] => { "changed": false, "false_condition": "pcp_rest_api | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [Setup metric graphing service.] 
****************************************** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:126 Tuesday 24 June 2025 13:00:00 -0400 (0:00:00.047) 0:00:30.132 ********** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_graph_service | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Configure firewall] ****************** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:134 Tuesday 24 June 2025 13:00:00 -0400 (0:00:00.037) 0:00:30.169 ********** included: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/firewall.yml for managed-node3 TASK [fedora.linux_system_roles.metrics : Initialize __metrics_firewall] ******* task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/firewall.yml:9 Tuesday 24 June 2025 13:00:00 -0400 (0:00:00.054) 0:00:30.223 ********** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_manage_firewall | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Port for pmcd] *********************** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/firewall.yml:13 Tuesday 24 June 2025 13:00:00 -0400 (0:00:00.044) 0:00:30.268 ********** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_manage_firewall | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Port for pmproxy used by query and grafana] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/firewall.yml:19 Tuesday 24 June 2025 13:00:00 -0400 (0:00:00.048) 0:00:30.316 ********** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_manage_firewall | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Service for grafana] ***************** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/firewall.yml:25 Tuesday 24 June 2025 13:00:00 -0400 (0:00:00.045) 0:00:30.362 ********** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_manage_firewall | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Service for valkey] ****************** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/firewall.yml:31 Tuesday 24 June 2025 13:00:00 -0400 (0:00:00.043) 0:00:30.405 ********** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_manage_firewall | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Service for redis] ******************* task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/firewall.yml:38 Tuesday 24 June 2025 13:00:00 -0400 (0:00:00.062) 0:00:30.467 ********** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_manage_firewall | bool", "skip_reason": "Conditional result was False" } TASK [Ensure the service and the port status with the firewall role] *********** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/firewall.yml:45 Tuesday 24 June 2025 13:00:00 -0400 (0:00:00.079) 0:00:30.546 ********** 
skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_manage_firewall | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.metrics : Configure selinux] ******************* task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/main.yml:137 Tuesday 24 June 2025 13:00:00 -0400 (0:00:00.034) 0:00:30.581 ********** included: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/selinux.yml for managed-node3 TASK [fedora.linux_system_roles.metrics : Set pcp_bind_all_unreserved_ports] *** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/selinux.yml:6 Tuesday 24 June 2025 13:00:00 -0400 (0:00:00.033) 0:00:30.614 ********** ok: [managed-node3] => { "ansible_facts": { "__metrics_selinux": [ [ { "name": "pcp_bind_all_unreserved_ports", "state": "on" } ] ] }, "changed": false } TASK [Ensure the port status with the selinux role] **************************** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/selinux.yml:11 Tuesday 24 June 2025 13:00:00 -0400 (0:00:00.036) 0:00:30.651 ********** ERROR! the role 'fedora.linux_system_roles.selinux' was not found in fedora.linux_system_roles:ansible.legacy:/tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/tests/metrics/roles:/root/.ansible/roles:/usr/share/ansible/roles:/etc/ansible/roles:/tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/tests/metrics The error appears to be in '/tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/metrics/tasks/selinux.yml': line 13, column 15, but may be elsewhere in the file depending on the exact syntax problem. The offending line appears to be: include_role: name: fedora.linux_system_roles.selinux ^ here TASK [Handle failure case] ***************************************************** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_verify_basic.yml:40 Tuesday 24 June 2025 13:00:00 -0400 (0:00:00.040) 0:00:30.692 ********** included: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/tests/metrics/handle_test_failure.yml for managed-node3 TASK [Collect logs] ************************************************************ task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/tests/metrics/handle_test_failure.yml:2 Tuesday 24 June 2025 13:00:00 -0400 (0:00:00.032) 0:00:30.725 ********** ok: [managed-node3] => { "changed": false, "cmd": "journalctl -ex\necho '##################'\necho List of SELinux AVCs - note list may be empty\ngrep type=AVC /var/log/audit/audit.log\necho '##################'\nls -alrtF /run\nif [ -d /run/pcp ]; then\n ls -alrtF /run/pcp\nelse\n echo ERROR - /run/pcp does not exist\nfi\n", "delta": "0:00:00.033393", "end": "2025-06-24 13:00:01.065323", "rc": 0, "start": "2025-06-24 13:00:01.031930" } STDOUT: -- Logs begin at Tue 2025-06-24 12:52:40 EDT, end at Tue 2025-06-24 13:00:01 EDT. 
-- Jun 24 12:52:40 localhost.localdomain kernel: Running certificate verification selftests Jun 24 12:52:40 localhost.localdomain kernel: Loaded X.509 cert 'Certificate verification self-testing key: f58703bb33ce1b73ee02eccdee5b8817518fe3db' Jun 24 12:52:40 localhost.localdomain kernel: Block layer SCSI generic (bsg) driver version 0.4 loaded (major 247) Jun 24 12:52:40 localhost.localdomain kernel: io scheduler mq-deadline registered Jun 24 12:52:40 localhost.localdomain kernel: io scheduler kyber registered Jun 24 12:52:40 localhost.localdomain kernel: io scheduler bfq registered Jun 24 12:52:40 localhost.localdomain kernel: atomic64_test: passed for x86-64 platform with CX8 and with SSE Jun 24 12:52:40 localhost.localdomain kernel: shpchp: Standard Hot Plug PCI Controller Driver version: 0.4 Jun 24 12:52:40 localhost.localdomain kernel: input: Power Button as /devices/LNXSYSTM:00/LNXPWRBN:00/input/input0 Jun 24 12:52:40 localhost.localdomain kernel: ACPI: Power Button [PWRF] Jun 24 12:52:40 localhost.localdomain kernel: input: Sleep Button as /devices/LNXSYSTM:00/LNXSLPBN:00/input/input1 Jun 24 12:52:40 localhost.localdomain kernel: ACPI: Sleep Button [SLPF] Jun 24 12:52:40 localhost.localdomain kernel: xen: --> pirq=22 -> irq=47 (gsi=47) Jun 24 12:52:40 localhost.localdomain kernel: xen:grant_table: Grant tables using version 1 layout Jun 24 12:52:40 localhost.localdomain kernel: Grant table initialized Jun 24 12:52:40 localhost.localdomain kernel: Cannot get hvm parameter CONSOLE_EVTCHN (18): -22! Jun 24 12:52:40 localhost.localdomain kernel: Serial: 8250/16550 driver, 4 ports, IRQ sharing enabled Jun 24 12:52:40 localhost.localdomain kernel: 00:06: ttyS0 at I/O 0x3f8 (irq = 4, base_baud = 115200) is a 16550A Jun 24 12:52:40 localhost.localdomain kernel: Non-volatile memory driver v1.3 Jun 24 12:52:40 localhost.localdomain kernel: rdac: device handler registered Jun 24 12:52:40 localhost.localdomain kernel: hp_sw: device handler registered Jun 24 12:52:40 localhost.localdomain kernel: emc: device handler registered Jun 24 12:52:40 localhost.localdomain kernel: alua: device handler registered Jun 24 12:52:40 localhost.localdomain kernel: libphy: Fixed MDIO Bus: probed Jun 24 12:52:40 localhost.localdomain kernel: usbcore: registered new interface driver usbserial_generic Jun 24 12:52:40 localhost.localdomain kernel: usbserial: USB Serial support registered for generic Jun 24 12:52:40 localhost.localdomain kernel: i8042: PNP: PS/2 Controller [PNP0303:PS2K,PNP0f13:PS2M] at 0x60,0x64 irq 1,12 Jun 24 12:52:40 localhost.localdomain kernel: i8042: Warning: Keylock active Jun 24 12:52:40 localhost.localdomain kernel: serio: i8042 KBD port at 0x60,0x64 irq 1 Jun 24 12:52:40 localhost.localdomain kernel: serio: i8042 AUX port at 0x60,0x64 irq 12 Jun 24 12:52:40 localhost.localdomain kernel: mousedev: PS/2 mouse device common for all mice Jun 24 12:52:40 localhost.localdomain kernel: rtc_cmos 00:02: registered as rtc0 Jun 24 12:52:40 localhost.localdomain kernel: rtc_cmos 00:02: alarms up to one day, 114 bytes nvram, hpet irqs Jun 24 12:52:40 localhost.localdomain kernel: intel_pstate: CPU model not supported Jun 24 12:52:40 localhost.localdomain kernel: hid: raw HID events driver (C) Jiri Kosina Jun 24 12:52:40 localhost.localdomain kernel: usbcore: registered new interface driver usbhid Jun 24 12:52:40 localhost.localdomain kernel: usbhid: USB HID core driver Jun 24 12:52:40 localhost.localdomain kernel: drop_monitor: Initializing network drop monitor service Jun 24 12:52:40 
localhost.localdomain kernel: Initializing XFRM netlink socket Jun 24 12:52:40 localhost.localdomain kernel: NET: Registered protocol family 10 Jun 24 12:52:40 localhost.localdomain kernel: Segment Routing with IPv6 Jun 24 12:52:40 localhost.localdomain kernel: NET: Registered protocol family 17 Jun 24 12:52:40 localhost.localdomain kernel: mpls_gso: MPLS GSO support Jun 24 12:52:40 localhost.localdomain kernel: AVX2 version of gcm_enc/dec engaged. Jun 24 12:52:40 localhost.localdomain kernel: AES CTR mode by8 optimization enabled Jun 24 12:52:40 localhost.localdomain kernel: sched_clock: Marking stable (514884138, 0)->(17203153376, -16688269238) Jun 24 12:52:40 localhost.localdomain kernel: registered taskstats version 1 Jun 24 12:52:40 localhost.localdomain kernel: Loading compiled-in X.509 certificates Jun 24 12:52:40 localhost.localdomain kernel: Loaded X.509 cert 'Red Hat Enterprise Linux kernel signing key: 9036118f247aaa99970a25e487b5a3a55df7255f' Jun 24 12:52:40 localhost.localdomain kernel: Loaded X.509 cert 'Red Hat Enterprise Linux Driver Update Program (key 3): bf57f3e87362bc7229d9f465321773dfd1f77a80' Jun 24 12:52:40 localhost.localdomain kernel: Loaded X.509 cert 'Red Hat Enterprise Linux kpatch signing key: 4d38fd864ebe18c5f0b72e3852e2014c3a676fc8' Jun 24 12:52:40 localhost.localdomain kernel: zswap: loaded using pool lzo/zbud Jun 24 12:52:40 localhost.localdomain kernel: page_owner is disabled Jun 24 12:52:40 localhost.localdomain kernel: Key type big_key registered Jun 24 12:52:40 localhost.localdomain kernel: Key type encrypted registered Jun 24 12:52:40 localhost.localdomain kernel: ima: No TPM chip found, activating TPM-bypass! Jun 24 12:52:40 localhost.localdomain kernel: ima: Allocated hash algorithm: sha256 Jun 24 12:52:40 localhost.localdomain kernel: ima: No architecture policies found Jun 24 12:52:40 localhost.localdomain kernel: evm: Initialising EVM extended attributes: Jun 24 12:52:40 localhost.localdomain kernel: evm: security.selinux Jun 24 12:52:40 localhost.localdomain kernel: evm: security.ima Jun 24 12:52:40 localhost.localdomain kernel: evm: security.capability Jun 24 12:52:40 localhost.localdomain kernel: evm: HMAC attrs: 0x1 Jun 24 12:52:40 localhost.localdomain kernel: xenbus_probe_frontend: Device with no driver: device/vbd/768 Jun 24 12:52:40 localhost.localdomain kernel: xenbus_probe_frontend: Device with no driver: device/vif/0 Jun 24 12:52:40 localhost.localdomain kernel: rtc_cmos 00:02: setting system clock to 2025-06-24 16:52:40 UTC (1750783960) Jun 24 12:52:40 localhost.localdomain kernel: Freeing unused decrypted memory: 2028K Jun 24 12:52:40 localhost.localdomain kernel: Freeing unused kernel image (initmem) memory: 2820K Jun 24 12:52:40 localhost.localdomain kernel: Write protecting the kernel read-only data: 26624k Jun 24 12:52:40 localhost.localdomain kernel: Freeing unused kernel image (text/rodata gap) memory: 2016K Jun 24 12:52:40 localhost.localdomain kernel: Freeing unused kernel image (rodata/data gap) memory: 1672K Jun 24 12:52:40 localhost.localdomain systemd[1]: systemd 239 (239-82.el8.1) running in system mode. (+PAM +AUDIT +SELINUX +IMA -APPARMOR +SMACK +SYSVINIT +UTMP +LIBCRYPTSETUP +GCRYPT +GNUTLS +ACL +XZ +LZ4 +SECCOMP +BLKID +ELFUTILS +KMOD +IDN2 -IDN +PCRE2 default-hierarchy=legacy) Jun 24 12:52:40 localhost.localdomain systemd[1]: Detected virtualization xen. Jun 24 12:52:40 localhost.localdomain systemd[1]: Detected architecture x86-64. Jun 24 12:52:40 localhost.localdomain systemd[1]: Running in initial RAM disk. 
Jun 24 12:52:40 localhost.localdomain systemd[1]: Set hostname to . Jun 24 12:52:40 localhost.localdomain systemd[1]: Listening on udev Kernel Socket. Jun 24 12:52:40 localhost.localdomain systemd[1]: Reached target Timers. Jun 24 12:52:40 localhost.localdomain systemd[1]: Reached target Swap. Jun 24 12:52:40 localhost.localdomain systemd[1]: Listening on Journal Socket (/dev/log). Jun 24 12:52:40 localhost.localdomain systemd[1]: Reached target Slices. Jun 24 12:52:40 localhost.localdomain systemd-journald[217]: Journal started -- Subject: The journal has been started -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The system journal process has started up, opened the journal -- files for writing and is now ready to process requests. Jun 24 12:52:40 localhost.localdomain systemd-journald[217]: Runtime journal (/run/log/journal/296974b5ae524724a4e5b0037952a204) is 8.0M, max 176.9M, 168.9M free. -- Subject: Disk space used by the journal -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Runtime journal (/run/log/journal/296974b5ae524724a4e5b0037952a204) is currently using 8.0M. -- Maximum allowed usage is set to 176.9M. -- Leaving at least 265.4M free (of currently available 1.7G of disk space). -- Enforced usage limit is thus 176.9M, of which 168.9M are still available. -- -- The limits controlling how much disk space is used by the journal may -- be configured with SystemMaxUse=, SystemKeepFree=, SystemMaxFileSize=, -- RuntimeMaxUse=, RuntimeKeepFree=, RuntimeMaxFileSize= settings in -- /etc/systemd/journald.conf. See journald.conf(5) for details. Jun 24 12:52:40 localhost.localdomain systemd[1]: memstrack.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit memstrack.service has successfully entered the 'dead' state. Jun 24 12:52:40 localhost.localdomain systemd[1]: systemd-vconsole-setup.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit systemd-vconsole-setup.service has successfully entered the 'dead' state. Jun 24 12:52:40 localhost.localdomain systemd[1]: Started Setup Virtual Console. -- Subject: Unit systemd-vconsole-setup.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-vconsole-setup.service has finished starting up. -- -- The start-up result is done. Jun 24 12:52:40 localhost.localdomain systemd[1]: Starting dracut cmdline hook... -- Subject: Unit dracut-cmdline.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit dracut-cmdline.service has begun starting up. Jun 24 12:52:40 localhost.localdomain dracut-cmdline[247]: dracut-8 dracut-049-233.git20240115.el8 Jun 24 12:52:40 localhost.localdomain dracut-cmdline[247]: Using kernel command line parameters: BOOT_IMAGE=(hd0,msdos1)/boot/vmlinuz-4.18.0-553.5.1.el8.x86_64 root=UUID=fe591198-9082-4b15-9b62-e83518524cd2 ro crashkernel=auto net.ifnames=0 rhgb quiet Jun 24 12:52:40 localhost.localdomain systemd[1]: Started dracut cmdline hook. -- Subject: Unit dracut-cmdline.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit dracut-cmdline.service has finished starting up. -- -- The start-up result is done. Jun 24 12:52:40 localhost.localdomain systemd[1]: Starting dracut pre-udev hook... 
-- Subject: Unit dracut-pre-udev.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit dracut-pre-udev.service has begun starting up. Jun 24 12:52:40 localhost.localdomain systemd[1]: Started dracut pre-udev hook. -- Subject: Unit dracut-pre-udev.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit dracut-pre-udev.service has finished starting up. -- -- The start-up result is done. Jun 24 12:52:40 localhost.localdomain systemd[1]: Starting udev Kernel Device Manager... -- Subject: Unit systemd-udevd.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-udevd.service has begun starting up. Jun 24 12:52:40 localhost.localdomain systemd-udevd[309]: Network interface NamePolicy= disabled on kernel command line, ignoring. Jun 24 12:52:40 localhost.localdomain systemd[1]: Started udev Kernel Device Manager. -- Subject: Unit systemd-udevd.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-udevd.service has finished starting up. -- -- The start-up result is done. Jun 24 12:52:40 localhost.localdomain systemd[1]: Starting udev Coldplug all Devices... -- Subject: Unit systemd-udev-trigger.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-udev-trigger.service has begun starting up. Jun 24 12:52:40 localhost.localdomain systemd[1]: Mounting Kernel Configuration File System... -- Subject: Unit sys-kernel-config.mount has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit sys-kernel-config.mount has begun starting up. Jun 24 12:52:40 localhost.localdomain systemd[1]: Mounted Kernel Configuration File System. -- Subject: Unit sys-kernel-config.mount has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit sys-kernel-config.mount has finished starting up. -- -- The start-up result is done. Jun 24 12:52:40 localhost.localdomain kernel: input: AT Translated Set 2 keyboard as /devices/platform/i8042/serio0/input/input2 Jun 24 12:52:41 localhost.localdomain systemd[1]: Started udev Coldplug all Devices. -- Subject: Unit systemd-udev-trigger.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-udev-trigger.service has finished starting up. -- -- The start-up result is done. Jun 24 12:52:41 localhost.localdomain systemd[1]: Starting Show Plymouth Boot Screen... -- Subject: Unit plymouth-start.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit plymouth-start.service has begun starting up. Jun 24 12:52:41 localhost.localdomain systemd[1]: Reached target System Initialization. -- Subject: Unit sysinit.target has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit sysinit.target has finished starting up. -- -- The start-up result is done. Jun 24 12:52:41 localhost.localdomain systemd[1]: Starting dracut initqueue hook... -- Subject: Unit dracut-initqueue.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit dracut-initqueue.service has begun starting up. Jun 24 12:52:41 localhost.localdomain systemd[1]: Received SIGRTMIN+20 from PID 339 (plymouthd). 
Jun 24 12:52:41 localhost.localdomain systemd[1]: Started Show Plymouth Boot Screen. -- Subject: Unit plymouth-start.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit plymouth-start.service has finished starting up. -- -- The start-up result is done. Jun 24 12:52:41 localhost.localdomain systemd[1]: Reached target Paths. -- Subject: Unit paths.target has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit paths.target has finished starting up. -- -- The start-up result is done. Jun 24 12:52:41 localhost.localdomain systemd[1]: Started Forward Password Requests to Plymouth Directory Watch. -- Subject: Unit systemd-ask-password-plymouth.path has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-ask-password-plymouth.path has finished starting up. -- -- The start-up result is done. Jun 24 12:52:41 localhost.localdomain systemd[1]: Reached target Basic System. -- Subject: Unit basic.target has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit basic.target has finished starting up. -- -- The start-up result is done. Jun 24 12:52:41 localhost.localdomain kernel: libata version 3.00 loaded. Jun 24 12:52:41 localhost.localdomain kernel: ata_piix 0000:00:01.1: version 2.13 Jun 24 12:52:41 localhost.localdomain kernel: xen_netfront: Initialising Xen virtual ethernet driver Jun 24 12:52:41 localhost.localdomain kernel: Invalid max_queues (4), will use default max: 2. Jun 24 12:52:41 localhost.localdomain kernel: scsi host0: ata_piix Jun 24 12:52:41 localhost.localdomain systemd-udevd[336]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable. Jun 24 12:52:41 localhost.localdomain kernel: blkfront: xvda: barrier or flush: disabled; persistent grants: disabled; indirect descriptors: enabled; Jun 24 12:52:41 localhost.localdomain kernel: scsi host1: ata_piix Jun 24 12:52:41 localhost.localdomain kernel: ata1: PATA max MWDMA2 cmd 0x1f0 ctl 0x3f6 bmdma 0xc000 irq 14 Jun 24 12:52:41 localhost.localdomain kernel: ata2: PATA max MWDMA2 cmd 0x170 ctl 0x376 bmdma 0xc008 irq 15 Jun 24 12:52:41 localhost.localdomain kernel: ACPI: bus type drm_connector registered Jun 24 12:52:41 localhost.localdomain kernel: xvda: xvda1 Jun 24 12:52:41 localhost.localdomain kernel: cirrus 0000:00:02.0: vgaarb: deactivate vga console Jun 24 12:52:41 localhost.localdomain systemd-udevd[343]: Using default interface naming scheme 'rhel-8.0'. Jun 24 12:52:41 localhost.localdomain systemd-udevd[343]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable. Jun 24 12:52:41 localhost.localdomain systemd[1]: Found device /dev/disk/by-uuid/fe591198-9082-4b15-9b62-e83518524cd2. -- Subject: Unit dev-disk-by\x2duuid-fe591198\x2d9082\x2d4b15\x2d9b62\x2de83518524cd2.device has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit dev-disk-by\x2duuid-fe591198\x2d9082\x2d4b15\x2d9b62\x2de83518524cd2.device has finished starting up. -- -- The start-up result is done. Jun 24 12:52:41 localhost.localdomain systemd[1]: Reached target Initrd Root Device. -- Subject: Unit initrd-root-device.target has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit initrd-root-device.target has finished starting up. -- -- The start-up result is done. 
Jun 24 12:52:41 localhost.localdomain kernel: Console: switching to colour dummy device 80x25 Jun 24 12:52:41 localhost.localdomain kernel: [drm] Initialized cirrus 2.0.0 2019 for 0000:00:02.0 on minor 0 Jun 24 12:52:41 localhost.localdomain kernel: fbcon: cirrusdrmfb (fb0) is primary device Jun 24 12:52:41 localhost.localdomain kernel: cirrus 0000:00:02.0: [drm] drm_plane_enable_fb_damage_clips() not called Jun 24 12:52:41 localhost.localdomain kernel: Console: switching to colour frame buffer device 128x48 Jun 24 12:52:41 localhost.localdomain kernel: cirrus 0000:00:02.0: [drm] fb0: cirrusdrmfb frame buffer device Jun 24 12:52:41 localhost.localdomain systemd[1]: Started dracut initqueue hook. -- Subject: Unit dracut-initqueue.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit dracut-initqueue.service has finished starting up. -- -- The start-up result is done. Jun 24 12:52:41 localhost.localdomain systemd[1]: Reached target Remote File Systems (Pre). -- Subject: Unit remote-fs-pre.target has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit remote-fs-pre.target has finished starting up. -- -- The start-up result is done. Jun 24 12:52:41 localhost.localdomain systemd[1]: Reached target Remote File Systems. -- Subject: Unit remote-fs.target has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit remote-fs.target has finished starting up. -- -- The start-up result is done. Jun 24 12:52:41 localhost.localdomain systemd[1]: Starting File System Check on /dev/disk/by-uuid/fe591198-9082-4b15-9b62-e83518524cd2... -- Subject: Unit systemd-fsck-root.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-fsck-root.service has begun starting up. Jun 24 12:52:41 localhost.localdomain systemd-fsck[380]: /usr/sbin/fsck.xfs: XFS file system. Jun 24 12:52:41 localhost.localdomain systemd[1]: Started File System Check on /dev/disk/by-uuid/fe591198-9082-4b15-9b62-e83518524cd2. -- Subject: Unit systemd-fsck-root.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-fsck-root.service has finished starting up. -- -- The start-up result is done. Jun 24 12:52:41 localhost.localdomain systemd[1]: Mounting /sysroot... -- Subject: Unit sysroot.mount has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit sysroot.mount has begun starting up. Jun 24 12:52:41 localhost.localdomain kernel: tsc: Refined TSC clocksource calibration: 2899.979 MHz Jun 24 12:52:41 localhost.localdomain kernel: clocksource: tsc: mask: 0xffffffffffffffff max_cycles: 0x29cd2f4854d, max_idle_ns: 440795323984 ns Jun 24 12:52:41 localhost.localdomain kernel: SGI XFS with ACLs, security attributes, quota, no debug enabled Jun 24 12:52:41 localhost.localdomain kernel: XFS (xvda1): Mounting V5 Filesystem Jun 24 12:52:41 localhost.localdomain kernel: input: ImPS/2 Generic Wheel Mouse as /devices/platform/i8042/serio1/input/input4 Jun 24 12:52:43 localhost.localdomain kernel: XFS (xvda1): Ending clean mount Jun 24 12:52:43 localhost.localdomain systemd[1]: Mounted /sysroot. -- Subject: Unit sysroot.mount has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit sysroot.mount has finished starting up. -- -- The start-up result is done. 
Jun 24 12:52:43 localhost.localdomain systemd[1]: Reached target Initrd Root File System. -- Subject: Unit initrd-root-fs.target has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit initrd-root-fs.target has finished starting up. -- -- The start-up result is done. Jun 24 12:52:43 localhost.localdomain systemd[1]: Starting Reload Configuration from the Real Root... -- Subject: Unit initrd-parse-etc.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit initrd-parse-etc.service has begun starting up. Jun 24 12:52:43 localhost.localdomain systemd[1]: Reloading. Jun 24 12:52:43 localhost.localdomain systemd[1]: initrd-parse-etc.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit initrd-parse-etc.service has successfully entered the 'dead' state. Jun 24 12:52:43 localhost.localdomain systemd[1]: Started Reload Configuration from the Real Root. -- Subject: Unit initrd-parse-etc.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit initrd-parse-etc.service has finished starting up. -- -- The start-up result is done. Jun 24 12:52:43 localhost.localdomain systemd[1]: Reached target Initrd File Systems. -- Subject: Unit initrd-fs.target has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit initrd-fs.target has finished starting up. -- -- The start-up result is done. Jun 24 12:52:43 localhost.localdomain systemd[1]: Reached target Initrd Default Target. -- Subject: Unit initrd.target has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit initrd.target has finished starting up. -- -- The start-up result is done. Jun 24 12:52:43 localhost.localdomain systemd[1]: Starting dracut pre-pivot and cleanup hook... -- Subject: Unit dracut-pre-pivot.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit dracut-pre-pivot.service has begun starting up. Jun 24 12:52:43 localhost.localdomain systemd[1]: Started dracut pre-pivot and cleanup hook. -- Subject: Unit dracut-pre-pivot.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit dracut-pre-pivot.service has finished starting up. -- -- The start-up result is done. Jun 24 12:52:43 localhost.localdomain systemd[1]: Starting Cleaning Up and Shutting Down Daemons... -- Subject: Unit initrd-cleanup.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit initrd-cleanup.service has begun starting up. Jun 24 12:52:43 localhost.localdomain systemd[1]: Starting Setup Virtual Console... -- Subject: Unit systemd-vconsole-setup.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-vconsole-setup.service has begun starting up. Jun 24 12:52:43 localhost.localdomain systemd[1]: dracut-pre-pivot.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit dracut-pre-pivot.service has successfully entered the 'dead' state. Jun 24 12:52:43 localhost.localdomain systemd[1]: Stopped dracut pre-pivot and cleanup hook. 
-- Subject: Unit dracut-pre-pivot.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit dracut-pre-pivot.service has finished shutting down. Jun 24 12:52:43 localhost.localdomain systemd[1]: Stopped target Remote File Systems. -- Subject: Unit remote-fs.target has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit remote-fs.target has finished shutting down. Jun 24 12:52:43 localhost.localdomain systemd[1]: Stopped target Remote File Systems (Pre). -- Subject: Unit remote-fs-pre.target has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit remote-fs-pre.target has finished shutting down. Jun 24 12:52:43 localhost.localdomain systemd[1]: Stopped target Initrd Default Target. -- Subject: Unit initrd.target has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit initrd.target has finished shutting down. Jun 24 12:52:43 localhost.localdomain systemd[1]: Stopped target Basic System. -- Subject: Unit basic.target has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit basic.target has finished shutting down. Jun 24 12:52:43 localhost.localdomain systemd[1]: Stopped target Sockets. -- Subject: Unit sockets.target has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit sockets.target has finished shutting down. Jun 24 12:52:43 localhost.localdomain systemd[1]: Stopped target Initrd Root Device. -- Subject: Unit initrd-root-device.target has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit initrd-root-device.target has finished shutting down. Jun 24 12:52:43 localhost.localdomain systemd[1]: Stopped target Slices. -- Subject: Unit slices.target has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit slices.target has finished shutting down. Jun 24 12:52:43 localhost.localdomain systemd[1]: Stopped target Timers. -- Subject: Unit timers.target has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit timers.target has finished shutting down. Jun 24 12:52:43 localhost.localdomain systemd[1]: Stopped target System Initialization. -- Subject: Unit sysinit.target has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit sysinit.target has finished shutting down. Jun 24 12:52:43 localhost.localdomain systemd[1]: systemd-tmpfiles-setup.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit systemd-tmpfiles-setup.service has successfully entered the 'dead' state. Jun 24 12:52:43 localhost.localdomain systemd[1]: Stopped Create Volatile Files and Directories. -- Subject: Unit systemd-tmpfiles-setup.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-tmpfiles-setup.service has finished shutting down. Jun 24 12:52:43 localhost.localdomain systemd[1]: Stopped target Swap. -- Subject: Unit swap.target has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit swap.target has finished shutting down. Jun 24 12:52:43 localhost.localdomain systemd[1]: Stopping udev Kernel Device Manager... 
-- Subject: Unit systemd-udevd.service has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-udevd.service has begun shutting down. Jun 24 12:52:43 localhost.localdomain systemd[1]: dracut-initqueue.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit dracut-initqueue.service has successfully entered the 'dead' state. Jun 24 12:52:43 localhost.localdomain systemd[1]: Stopped dracut initqueue hook. -- Subject: Unit dracut-initqueue.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit dracut-initqueue.service has finished shutting down. Jun 24 12:52:43 localhost.localdomain systemd[1]: systemd-udev-trigger.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit systemd-udev-trigger.service has successfully entered the 'dead' state. Jun 24 12:52:43 localhost.localdomain systemd[1]: Stopped udev Coldplug all Devices. -- Subject: Unit systemd-udev-trigger.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-udev-trigger.service has finished shutting down. Jun 24 12:52:43 localhost.localdomain systemd[1]: Starting Plymouth switch root service... -- Subject: Unit plymouth-switch-root.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit plymouth-switch-root.service has begun starting up. Jun 24 12:52:43 localhost.localdomain systemd[1]: systemd-sysctl.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit systemd-sysctl.service has successfully entered the 'dead' state. Jun 24 12:52:43 localhost.localdomain systemd[1]: Stopped Apply Kernel Variables. -- Subject: Unit systemd-sysctl.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-sysctl.service has finished shutting down. Jun 24 12:52:43 localhost.localdomain systemd[1]: Stopped target Local File Systems. -- Subject: Unit local-fs.target has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit local-fs.target has finished shutting down. Jun 24 12:52:43 localhost.localdomain systemd[1]: Stopped target Paths. -- Subject: Unit paths.target has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit paths.target has finished shutting down. Jun 24 12:52:43 localhost.localdomain systemd[1]: initrd-cleanup.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit initrd-cleanup.service has successfully entered the 'dead' state. Jun 24 12:52:43 localhost.localdomain systemd[1]: Started Cleaning Up and Shutting Down Daemons. -- Subject: Unit initrd-cleanup.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit initrd-cleanup.service has finished starting up. -- -- The start-up result is done. Jun 24 12:52:43 localhost.localdomain systemd[1]: systemd-udevd.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit systemd-udevd.service has successfully entered the 'dead' state. 
Jun 24 12:52:43 localhost.localdomain systemd[1]: Stopped udev Kernel Device Manager. -- Subject: Unit systemd-udevd.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-udevd.service has finished shutting down. Jun 24 12:52:43 localhost.localdomain systemd[1]: dracut-pre-udev.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit dracut-pre-udev.service has successfully entered the 'dead' state. Jun 24 12:52:43 localhost.localdomain systemd[1]: Stopped dracut pre-udev hook. -- Subject: Unit dracut-pre-udev.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit dracut-pre-udev.service has finished shutting down. Jun 24 12:52:43 localhost.localdomain systemd[1]: dracut-cmdline.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit dracut-cmdline.service has successfully entered the 'dead' state. Jun 24 12:52:43 localhost.localdomain systemd[1]: Stopped dracut cmdline hook. -- Subject: Unit dracut-cmdline.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit dracut-cmdline.service has finished shutting down. Jun 24 12:52:43 localhost.localdomain systemd[1]: systemd-tmpfiles-setup-dev.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit systemd-tmpfiles-setup-dev.service has successfully entered the 'dead' state. Jun 24 12:52:43 localhost.localdomain systemd[1]: Stopped Create Static Device Nodes in /dev. -- Subject: Unit systemd-tmpfiles-setup-dev.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-tmpfiles-setup-dev.service has finished shutting down. Jun 24 12:52:43 localhost.localdomain systemd[1]: kmod-static-nodes.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit kmod-static-nodes.service has successfully entered the 'dead' state. Jun 24 12:52:43 localhost.localdomain systemd[1]: Stopped Create list of required static device nodes for the current kernel. -- Subject: Unit kmod-static-nodes.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit kmod-static-nodes.service has finished shutting down. Jun 24 12:52:43 localhost.localdomain systemd[1]: systemd-udevd-kernel.socket: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit systemd-udevd-kernel.socket has successfully entered the 'dead' state. Jun 24 12:52:43 localhost.localdomain systemd[1]: Closed udev Kernel Socket. -- Subject: Unit systemd-udevd-kernel.socket has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-udevd-kernel.socket has finished shutting down. Jun 24 12:52:43 localhost.localdomain systemd[1]: systemd-udevd-control.socket: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit systemd-udevd-control.socket has successfully entered the 'dead' state. Jun 24 12:52:43 localhost.localdomain systemd[1]: Closed udev Control Socket. 
-- Subject: Unit systemd-udevd-control.socket has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-udevd-control.socket has finished shutting down. Jun 24 12:52:43 localhost.localdomain systemd[1]: Starting Cleanup udevd DB... -- Subject: Unit initrd-udevadm-cleanup-db.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit initrd-udevadm-cleanup-db.service has begun starting up. Jun 24 12:52:43 localhost.localdomain systemd[1]: initrd-udevadm-cleanup-db.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit initrd-udevadm-cleanup-db.service has successfully entered the 'dead' state. Jun 24 12:52:43 localhost.localdomain systemd[1]: Started Cleanup udevd DB. -- Subject: Unit initrd-udevadm-cleanup-db.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit initrd-udevadm-cleanup-db.service has finished starting up. -- -- The start-up result is done. Jun 24 12:52:43 localhost.localdomain systemd[1]: systemd-vconsole-setup.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit systemd-vconsole-setup.service has successfully entered the 'dead' state. Jun 24 12:52:43 localhost.localdomain systemd[1]: Started Setup Virtual Console. -- Subject: Unit systemd-vconsole-setup.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-vconsole-setup.service has finished starting up. -- -- The start-up result is done. Jun 24 12:52:43 localhost.localdomain systemd[1]: Reached target Switch Root. -- Subject: Unit initrd-switch-root.target has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit initrd-switch-root.target has finished starting up. -- -- The start-up result is done. Jun 24 12:52:43 localhost.localdomain systemd[1]: Started Plymouth switch root service. -- Subject: Unit plymouth-switch-root.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit plymouth-switch-root.service has finished starting up. -- -- The start-up result is done. Jun 24 12:52:43 localhost.localdomain systemd[1]: Starting Switch Root... -- Subject: Unit initrd-switch-root.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit initrd-switch-root.service has begun starting up. Jun 24 12:52:43 localhost.localdomain systemd[1]: Switching root. Jun 24 12:52:44 localhost.localdomain systemd-journald[217]: Journal stopped -- Subject: The journal has been stopped -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The system journal process has shut down and closed all currently -- active journal files. 
Jun 24 12:52:48 localhost.localdomain kernel: printk: systemd: 16 output lines suppressed due to ratelimiting Jun 24 12:52:48 localhost.localdomain kernel: audit: type=1404 audit(1750783964.794:2): enforcing=1 old_enforcing=0 auid=4294967295 ses=4294967295 enabled=1 old-enabled=1 lsm=selinux res=1 Jun 24 12:52:48 localhost.localdomain kernel: SELinux: policy capability network_peer_controls=1 Jun 24 12:52:48 localhost.localdomain kernel: SELinux: policy capability open_perms=1 Jun 24 12:52:48 localhost.localdomain kernel: SELinux: policy capability extended_socket_class=1 Jun 24 12:52:48 localhost.localdomain kernel: SELinux: policy capability always_check_network=0 Jun 24 12:52:48 localhost.localdomain kernel: SELinux: policy capability cgroup_seclabel=1 Jun 24 12:52:48 localhost.localdomain kernel: SELinux: policy capability nnp_nosuid_transition=1 Jun 24 12:52:48 localhost.localdomain kernel: audit: type=1403 audit(1750783965.771:3): auid=4294967295 ses=4294967295 lsm=selinux res=1 Jun 24 12:52:48 localhost.localdomain systemd[1]: Successfully loaded SELinux policy in 978.888ms. Jun 24 12:52:48 localhost.localdomain systemd[1]: Relabelled /dev, /run and /sys/fs/cgroup in 14.305ms. Jun 24 12:52:48 localhost.localdomain systemd[1]: systemd 239 (239-82.el8.1) running in system mode. (+PAM +AUDIT +SELINUX +IMA -APPARMOR +SMACK +SYSVINIT +UTMP +LIBCRYPTSETUP +GCRYPT +GNUTLS +ACL +XZ +LZ4 +SECCOMP +BLKID +ELFUTILS +KMOD +IDN2 -IDN +PCRE2 default-hierarchy=legacy) Jun 24 12:52:48 localhost.localdomain systemd[1]: Detected virtualization xen. Jun 24 12:52:48 localhost.localdomain systemd[1]: Detected architecture x86-64. Jun 24 12:52:48 localhost.localdomain systemd[1]: Set hostname to . Jun 24 12:52:48 localhost.localdomain systemd[1]: Initializing machine ID from random generator. Jun 24 12:52:48 localhost.localdomain systemd[1]: Installed transient /etc/machine-id file. Jun 24 12:52:48 localhost.localdomain systemd[1]: initrd-switch-root.service: Succeeded. Jun 24 12:52:48 localhost.localdomain systemd[1]: Stopped Switch Root. Jun 24 12:52:48 localhost.localdomain systemd[1]: systemd-journald.service: Service has no hold-off time (RestartSec=0), scheduling restart. Jun 24 12:52:48 localhost.localdomain systemd[1]: systemd-journald.service: Scheduled restart job, restart counter is at 1. Jun 24 12:52:48 localhost.localdomain systemd-journald[501]: Journal started -- Subject: The journal has been started -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The system journal process has started up, opened the journal -- files for writing and is now ready to process requests. Jun 24 12:52:48 localhost.localdomain systemd-journald[501]: Runtime journal (/run/log/journal/94a3f53529c74a0b9a3049b92bfb4a0a) is 8.0M, max 176.9M, 168.9M free. -- Subject: Disk space used by the journal -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Runtime journal (/run/log/journal/94a3f53529c74a0b9a3049b92bfb4a0a) is currently using 8.0M. -- Maximum allowed usage is set to 176.9M. -- Leaving at least 265.4M free (of currently available 1.7G of disk space). -- Enforced usage limit is thus 176.9M, of which 168.9M are still available. -- -- The limits controlling how much disk space is used by the journal may -- be configured with SystemMaxUse=, SystemKeepFree=, SystemMaxFileSize=, -- RuntimeMaxUse=, RuntimeKeepFree=, RuntimeMaxFileSize= settings in -- /etc/systemd/journald.conf. See journald.conf(5) for details. 
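The systemd banner above lists its compile-time feature flags as a single +/- string. A small sketch (the flag string is copied from that banner; the parsing itself is only illustrative) that splits it into enabled and disabled features:

    # Feature string copied from the "systemd 239 (239-82.el8.1)" banner above.
    features = ("+PAM +AUDIT +SELINUX +IMA -APPARMOR +SMACK +SYSVINIT +UTMP "
                "+LIBCRYPTSETUP +GCRYPT +GNUTLS +ACL +XZ +LZ4 +SECCOMP +BLKID "
                "+ELFUTILS +KMOD +IDN2 -IDN +PCRE2")

    # "+" marks a feature compiled in, "-" one compiled out.
    enabled = [f[1:] for f in features.split() if f.startswith("+")]
    disabled = [f[1:] for f in features.split() if f.startswith("-")]
    print("enabled:", ", ".join(enabled))
    print("disabled:", ", ".join(disabled))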
Jun 24 12:52:48 localhost.localdomain systemd[1]: systemd-journald.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit systemd-journald.service has successfully entered the 'dead' state. Jun 24 12:52:48 localhost.localdomain systemd[1]: Started Apply Kernel Variables. -- Subject: Unit systemd-sysctl.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-sysctl.service has finished starting up. -- -- The start-up result is done. Jun 24 12:52:48 localhost.localdomain systemd[1]: Mounted Kernel Debug File System. -- Subject: Unit sys-kernel-debug.mount has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit sys-kernel-debug.mount has finished starting up. -- -- The start-up result is done. Jun 24 12:52:48 localhost.localdomain systemd[1]: Mounted Huge Pages File System. -- Subject: Unit dev-hugepages.mount has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit dev-hugepages.mount has finished starting up. -- -- The start-up result is done. Jun 24 12:52:48 localhost.localdomain systemd[1]: Mounted POSIX Message Queue File System. -- Subject: Unit dev-mqueue.mount has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit dev-mqueue.mount has finished starting up. -- -- The start-up result is done. Jun 24 12:52:48 localhost.localdomain systemd[1]: Started Remount Root and Kernel File Systems. -- Subject: Unit systemd-remount-fs.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-remount-fs.service has finished starting up. -- -- The start-up result is done. Jun 24 12:52:48 localhost.localdomain systemd[1]: Starting Load/Save Random Seed... -- Subject: Unit systemd-random-seed.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-random-seed.service has begun starting up. Jun 24 12:52:48 localhost.localdomain systemd[1]: Starting Flush Journal to Persistent Storage... -- Subject: Unit systemd-journal-flush.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-journal-flush.service has begun starting up. Jun 24 12:52:48 localhost.localdomain systemd[1]: Started Create list of required static device nodes for the current kernel. -- Subject: Unit kmod-static-nodes.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit kmod-static-nodes.service has finished starting up. -- -- The start-up result is done. Jun 24 12:52:48 localhost.localdomain systemd[1]: Started Read and set NIS domainname from /etc/sysconfig/network. -- Subject: Unit nis-domainname.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit nis-domainname.service has finished starting up. -- -- The start-up result is done. Jun 24 12:52:48 localhost.localdomain systemd[1]: Started Load/Save Random Seed. -- Subject: Unit systemd-random-seed.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-random-seed.service has finished starting up. -- -- The start-up result is done. 
Jun 24 12:52:48 localhost.localdomain systemd-journald[501]: Runtime journal (/run/log/journal/94a3f53529c74a0b9a3049b92bfb4a0a) is 8.0M, max 176.9M, 168.9M free. -- Subject: Disk space used by the journal -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Runtime journal (/run/log/journal/94a3f53529c74a0b9a3049b92bfb4a0a) is currently using 8.0M. -- Maximum allowed usage is set to 176.9M. -- Leaving at least 265.4M free (of currently available 1.7G of disk space). -- Enforced usage limit is thus 176.9M, of which 168.9M are still available. -- -- The limits controlling how much disk space is used by the journal may -- be configured with SystemMaxUse=, SystemKeepFree=, SystemMaxFileSize=, -- RuntimeMaxUse=, RuntimeKeepFree=, RuntimeMaxFileSize= settings in -- /etc/systemd/journald.conf. See journald.conf(5) for details. Jun 24 12:52:48 localhost.localdomain systemd[1]: Starting Create Static Device Nodes in /dev... -- Subject: Unit systemd-tmpfiles-setup-dev.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-tmpfiles-setup-dev.service has begun starting up. Jun 24 12:52:48 localhost.localdomain systemd[1]: Started Flush Journal to Persistent Storage. -- Subject: Unit systemd-journal-flush.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-journal-flush.service has finished starting up. -- -- The start-up result is done. Jun 24 12:52:48 localhost.localdomain systemd[1]: Started udev Coldplug all Devices. -- Subject: Unit systemd-udev-trigger.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-udev-trigger.service has finished starting up. -- -- The start-up result is done. Jun 24 12:52:48 localhost.localdomain systemd[1]: systemd-vconsole-setup.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit systemd-vconsole-setup.service has successfully entered the 'dead' state. Jun 24 12:52:48 localhost.localdomain systemd[1]: Started Setup Virtual Console. -- Subject: Unit systemd-vconsole-setup.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-vconsole-setup.service has finished starting up. -- -- The start-up result is done. Jun 24 12:52:48 localhost.localdomain systemd[1]: Started Create Static Device Nodes in /dev. -- Subject: Unit systemd-tmpfiles-setup-dev.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-tmpfiles-setup-dev.service has finished starting up. -- -- The start-up result is done. Jun 24 12:52:48 localhost.localdomain systemd[1]: Starting udev Kernel Device Manager... -- Subject: Unit systemd-udevd.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-udevd.service has begun starting up. Jun 24 12:52:48 localhost.localdomain systemd[1]: Reached target Local File Systems (Pre). -- Subject: Unit local-fs-pre.target has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit local-fs-pre.target has finished starting up. -- -- The start-up result is done. Jun 24 12:52:48 localhost.localdomain systemd[1]: Reached target Local File Systems. 
-- Subject: Unit local-fs.target has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit local-fs.target has finished starting up. -- -- The start-up result is done. Jun 24 12:52:48 localhost.localdomain systemd[1]: Starting Commit a transient machine-id on disk... -- Subject: Unit systemd-machine-id-commit.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-machine-id-commit.service has begun starting up. Jun 24 12:52:48 localhost.localdomain systemd[1]: Starting Import network configuration from initramfs... -- Subject: Unit import-state.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit import-state.service has begun starting up. Jun 24 12:52:48 localhost.localdomain systemd[1]: Starting Tell Plymouth To Write Out Runtime Data... -- Subject: Unit plymouth-read-write.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit plymouth-read-write.service has begun starting up. Jun 24 12:52:49 localhost.localdomain systemd[1]: Started Commit a transient machine-id on disk. -- Subject: Unit systemd-machine-id-commit.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-machine-id-commit.service has finished starting up. -- -- The start-up result is done. Jun 24 12:52:49 localhost.localdomain systemd[1]: Received SIGRTMIN+20 from PID 339 (plymouthd). Jun 24 12:52:49 localhost.localdomain systemd[1]: Started Tell Plymouth To Write Out Runtime Data. -- Subject: Unit plymouth-read-write.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit plymouth-read-write.service has finished starting up. -- -- The start-up result is done. Jun 24 12:52:49 localhost.localdomain systemd[1]: Started Import network configuration from initramfs. -- Subject: Unit import-state.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit import-state.service has finished starting up. -- -- The start-up result is done. Jun 24 12:52:49 localhost.localdomain systemd[1]: Starting Create Volatile Files and Directories... -- Subject: Unit systemd-tmpfiles-setup.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-tmpfiles-setup.service has begun starting up. Jun 24 12:52:49 localhost.localdomain systemd[1]: Started Create Volatile Files and Directories. -- Subject: Unit systemd-tmpfiles-setup.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-tmpfiles-setup.service has finished starting up. -- -- The start-up result is done. Jun 24 12:52:49 localhost.localdomain systemd[1]: Starting RPC Bind... -- Subject: Unit rpcbind.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit rpcbind.service has begun starting up. Jun 24 12:52:49 localhost.localdomain systemd[1]: Starting Security Auditing Service... -- Subject: Unit auditd.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit auditd.service has begun starting up. Jun 24 12:52:49 localhost.localdomain systemd[1]: Started RPC Bind. 
-- Subject: Unit rpcbind.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit rpcbind.service has finished starting up. -- -- The start-up result is done. Jun 24 12:52:49 localhost.localdomain systemd-udevd[533]: Network interface NamePolicy= disabled on kernel command line, ignoring. Jun 24 12:52:49 localhost.localdomain auditd[556]: No plugins found, not dispatching events Jun 24 12:52:49 localhost.localdomain auditd[556]: Init complete, auditd 3.1.2 listening for events (startup state enable) Jun 24 12:52:49 localhost.localdomain systemd[1]: etc-machine\x2did.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit etc-machine\x2did.mount has successfully entered the 'dead' state. Jun 24 12:52:49 localhost.localdomain systemd[1]: Mounting RPC Pipe File System... -- Subject: Unit var-lib-nfs-rpc_pipefs.mount has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit var-lib-nfs-rpc_pipefs.mount has begun starting up. Jun 24 12:52:49 localhost.localdomain augenrules[559]: /sbin/augenrules: No change Jun 24 12:52:49 localhost.localdomain kernel: RPC: Registered named UNIX socket transport module. Jun 24 12:52:49 localhost.localdomain kernel: RPC: Registered udp transport module. Jun 24 12:52:49 localhost.localdomain kernel: RPC: Registered tcp transport module. Jun 24 12:52:49 localhost.localdomain kernel: RPC: Registered tcp NFSv4.1 backchannel transport module. Jun 24 12:52:49 localhost.localdomain systemd[1]: Mounted RPC Pipe File System. -- Subject: Unit var-lib-nfs-rpc_pipefs.mount has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit var-lib-nfs-rpc_pipefs.mount has finished starting up. -- -- The start-up result is done. Jun 24 12:52:49 localhost.localdomain systemd[1]: Reached target rpc_pipefs.target. -- Subject: Unit rpc_pipefs.target has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit rpc_pipefs.target has finished starting up. -- -- The start-up result is done. 
Jun 24 12:52:49 localhost.localdomain augenrules[574]: No rules Jun 24 12:52:49 localhost.localdomain augenrules[574]: enabled 1 Jun 24 12:52:49 localhost.localdomain augenrules[574]: failure 1 Jun 24 12:52:49 localhost.localdomain augenrules[574]: pid 556 Jun 24 12:52:49 localhost.localdomain augenrules[574]: rate_limit 0 Jun 24 12:52:49 localhost.localdomain augenrules[574]: backlog_limit 8192 Jun 24 12:52:49 localhost.localdomain augenrules[574]: lost 0 Jun 24 12:52:49 localhost.localdomain augenrules[574]: backlog 4 Jun 24 12:52:49 localhost.localdomain augenrules[574]: backlog_wait_time 60000 Jun 24 12:52:49 localhost.localdomain augenrules[574]: backlog_wait_time_actual 0 Jun 24 12:52:49 localhost.localdomain augenrules[574]: enabled 1 Jun 24 12:52:49 localhost.localdomain augenrules[574]: failure 1 Jun 24 12:52:49 localhost.localdomain augenrules[574]: pid 556 Jun 24 12:52:49 localhost.localdomain augenrules[574]: rate_limit 0 Jun 24 12:52:49 localhost.localdomain augenrules[574]: backlog_limit 8192 Jun 24 12:52:49 localhost.localdomain augenrules[574]: lost 0 Jun 24 12:52:49 localhost.localdomain augenrules[574]: backlog 4 Jun 24 12:52:49 localhost.localdomain augenrules[574]: backlog_wait_time 60000 Jun 24 12:52:49 localhost.localdomain augenrules[574]: backlog_wait_time_actual 0 Jun 24 12:52:49 localhost.localdomain augenrules[574]: enabled 1 Jun 24 12:52:49 localhost.localdomain augenrules[574]: failure 1 Jun 24 12:52:49 localhost.localdomain augenrules[574]: pid 556 Jun 24 12:52:49 localhost.localdomain augenrules[574]: rate_limit 0 Jun 24 12:52:49 localhost.localdomain augenrules[574]: backlog_limit 8192 Jun 24 12:52:49 localhost.localdomain augenrules[574]: lost 0 Jun 24 12:52:49 localhost.localdomain augenrules[574]: backlog 0 Jun 24 12:52:49 localhost.localdomain augenrules[574]: backlog_wait_time 60000 Jun 24 12:52:49 localhost.localdomain augenrules[574]: backlog_wait_time_actual 0 Jun 24 12:52:49 localhost.localdomain systemd[1]: Started Security Auditing Service. -- Subject: Unit auditd.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit auditd.service has finished starting up. -- -- The start-up result is done. Jun 24 12:52:49 localhost.localdomain systemd[1]: Starting Update UTMP about System Boot/Shutdown... -- Subject: Unit systemd-update-utmp.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-update-utmp.service has begun starting up. Jun 24 12:52:49 localhost.localdomain systemd[1]: Started Update UTMP about System Boot/Shutdown. -- Subject: Unit systemd-update-utmp.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-update-utmp.service has finished starting up. -- -- The start-up result is done. Jun 24 12:52:49 localhost.localdomain systemd[1]: Started udev Kernel Device Manager. -- Subject: Unit systemd-udevd.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-udevd.service has finished starting up. -- -- The start-up result is done. Jun 24 12:52:49 localhost.localdomain systemd[1]: Reached target System Initialization. -- Subject: Unit sysinit.target has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit sysinit.target has finished starting up. -- -- The start-up result is done. 
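augenrules dumps the kernel audit status three times above as bare key/value pairs. A minimal sketch (sample values taken from the last block; the mapping step is illustrative) that turns one such block into a dictionary so successive dumps are easier to compare:

    # Status lines as augenrules[574] printed them above (last block).
    status_lines = [
        "enabled 1", "failure 1", "pid 556", "rate_limit 0",
        "backlog_limit 8192", "lost 0", "backlog 0",
        "backlog_wait_time 60000", "backlog_wait_time_actual 0",
    ]

    # Each entry is "key value"; all values here are integers.
    status = {key: int(value) for key, value in (entry.split() for entry in status_lines)}
    print(status["pid"], status["backlog_limit"], status["lost"])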
Jun 24 12:52:49 localhost.localdomain systemd[1]: Started Daily Cleanup of Temporary Directories. -- Subject: Unit systemd-tmpfiles-clean.timer has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-tmpfiles-clean.timer has finished starting up. -- -- The start-up result is done. Jun 24 12:52:49 localhost.localdomain systemd[1]: Listening on SSSD Kerberos Cache Manager responder socket. -- Subject: Unit sssd-kcm.socket has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit sssd-kcm.socket has finished starting up. -- -- The start-up result is done. Jun 24 12:52:49 localhost.localdomain systemd[1]: Started dnf makecache --timer. -- Subject: Unit dnf-makecache.timer has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit dnf-makecache.timer has finished starting up. -- -- The start-up result is done. Jun 24 12:52:49 localhost.localdomain systemd[1]: Listening on D-Bus System Message Bus Socket. -- Subject: Unit dbus.socket has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit dbus.socket has finished starting up. -- -- The start-up result is done. Jun 24 12:52:49 localhost.localdomain systemd[1]: Reached target Sockets. -- Subject: Unit sockets.target has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit sockets.target has finished starting up. -- -- The start-up result is done. Jun 24 12:52:49 localhost.localdomain systemd[1]: Reached target Basic System. -- Subject: Unit basic.target has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit basic.target has finished starting up. -- -- The start-up result is done. Jun 24 12:52:49 localhost.localdomain systemd[1]: Started irqbalance daemon. -- Subject: Unit irqbalance.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit irqbalance.service has finished starting up. -- -- The start-up result is done. Jun 24 12:52:49 localhost.localdomain systemd[1]: Starting NTP client/server... -- Subject: Unit chronyd.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit chronyd.service has begun starting up. Jun 24 12:52:49 localhost.localdomain systemd[1]: Reached target sshd-keygen.target. -- Subject: Unit sshd-keygen.target has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit sshd-keygen.target has finished starting up. -- -- The start-up result is done. Jun 24 12:52:49 localhost.localdomain systemd[1]: Reached target User and Group Name Lookups. -- Subject: Unit nss-user-lookup.target has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit nss-user-lookup.target has finished starting up. -- -- The start-up result is done. Jun 24 12:52:49 localhost.localdomain systemd[1]: Starting Restore /run/initramfs on shutdown... -- Subject: Unit dracut-shutdown.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit dracut-shutdown.service has begun starting up. Jun 24 12:52:49 localhost.localdomain systemd[1]: Starting Initial cloud-init job (pre-networking)... 
-- Subject: Unit cloud-init-local.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit cloud-init-local.service has begun starting up. Jun 24 12:52:49 localhost.localdomain systemd[1]: Started D-Bus System Message Bus. -- Subject: Unit dbus.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit dbus.service has finished starting up. -- -- The start-up result is done. Jun 24 12:52:49 localhost.localdomain systemd[1]: Started Hardware RNG Entropy Gatherer Daemon. -- Subject: Unit rngd.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit rngd.service has finished starting up. -- -- The start-up result is done. Jun 24 12:52:49 localhost.localdomain systemd[1]: Starting Login Service... -- Subject: Unit systemd-logind.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-logind.service has begun starting up. Jun 24 12:52:49 localhost.localdomain systemd[1]: Started daily update of the root trust anchor for DNSSEC. -- Subject: Unit unbound-anchor.timer has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit unbound-anchor.timer has finished starting up. -- -- The start-up result is done. Jun 24 12:52:49 localhost.localdomain systemd[1]: Reached target Timers. -- Subject: Unit timers.target has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit timers.target has finished starting up. -- -- The start-up result is done. Jun 24 12:52:49 localhost.localdomain systemd[1]: Started Restore /run/initramfs on shutdown. -- Subject: Unit dracut-shutdown.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit dracut-shutdown.service has finished starting up. -- -- The start-up result is done. Jun 24 12:52:49 localhost.localdomain systemd[1]: Starting update of the root trust anchor for DNSSEC validation in unbound... -- Subject: Unit unbound-anchor.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit unbound-anchor.service has begun starting up. Jun 24 12:52:49 localhost.localdomain kernel: input: PC Speaker as /devices/platform/pcspkr/input/input5 Jun 24 12:52:49 localhost.localdomain rngd[600]: Disabling 7: PKCS11 Entropy generator (pkcs11) Jun 24 12:52:49 localhost.localdomain rngd[600]: Disabling 5: NIST Network Entropy Beacon (nist) Jun 24 12:52:49 localhost.localdomain rngd[600]: Disabling 9: Qrypt quantum entropy beacon (qrypt) Jun 24 12:52:49 localhost.localdomain rngd[600]: Initializing available sources Jun 24 12:52:49 localhost.localdomain rngd[600]: [hwrng ]: Initialization Failed Jun 24 12:52:49 localhost.localdomain rngd[600]: [rdrand]: Enabling RDRAND rng support Jun 24 12:52:49 localhost.localdomain rngd[600]: [rdrand]: Initialized Jun 24 12:52:49 localhost.localdomain rngd[600]: [jitter]: JITTER timeout set to 5 sec Jun 24 12:52:49 localhost.localdomain chronyd[606]: chronyd version 4.5 starting (+CMDMON +NTP +REFCLOCK +RTC +PRIVDROP +SCFILTER +SIGND +ASYNCDNS +NTS +SECHASH +IPV6 +DEBUG) Jun 24 12:52:49 localhost.localdomain chronyd[606]: Loaded 0 symmetric keys Jun 24 12:52:49 localhost.localdomain systemd-logind[602]: New seat seat0. 
-- Subject: A new seat seat0 is now available -- Defined-By: systemd -- Support: https://access.redhat.com/support -- Documentation: https://www.freedesktop.org/wiki/Software/systemd/multiseat -- -- A new seat seat0 has been configured and is now available. Jun 24 12:52:49 localhost.localdomain chronyd[606]: Frequency 0.000 +/- 1000000.000 ppm read from /var/lib/chrony/drift Jun 24 12:52:49 localhost.localdomain chronyd[606]: Using right/UTC timezone to obtain leap second data Jun 24 12:52:49 localhost.localdomain rngd[600]: [jitter]: Initializing AES buffer Jun 24 12:52:49 localhost.localdomain systemd-logind[602]: Watching system buttons on /dev/input/event0 (Power Button) Jun 24 12:52:49 localhost.localdomain systemd-logind[602]: Watching system buttons on /dev/input/event1 (Sleep Button) Jun 24 12:52:49 localhost.localdomain kernel: piix4_smbus 0000:00:01.3: SMBus base address uninitialized - upgrade BIOS or use force_addr=0xaddr Jun 24 12:52:50 localhost.localdomain systemd[1]: Started Login Service. -- Subject: Unit systemd-logind.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-logind.service has finished starting up. -- -- The start-up result is done. Jun 24 12:52:50 localhost.localdomain systemd-logind[602]: Watching system buttons on /dev/input/event2 (AT Translated Set 2 keyboard) Jun 24 12:52:50 localhost.localdomain systemd[1]: Started NTP client/server. -- Subject: Unit chronyd.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit chronyd.service has finished starting up. -- -- The start-up result is done. Jun 24 12:52:50 localhost.localdomain systemd-udevd[590]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable. Jun 24 12:52:50 localhost.localdomain systemd[1]: unbound-anchor.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit unbound-anchor.service has successfully entered the 'dead' state. Jun 24 12:52:50 localhost.localdomain systemd[1]: Started update of the root trust anchor for DNSSEC validation in unbound. -- Subject: Unit unbound-anchor.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit unbound-anchor.service has finished starting up. -- -- The start-up result is done. Jun 24 12:52:50 localhost.localdomain kernel: RAPL PMU: API unit is 2^-32 Joules, 0 fixed counters, 655360 ms ovfl timer Jun 24 12:52:50 localhost.localdomain systemd-udevd[587]: Using default interface naming scheme 'rhel-8.0'. Jun 24 12:52:50 localhost.localdomain systemd-udevd[587]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable. Jun 24 12:52:50 localhost.localdomain kernel: EDAC sbridge: Seeking for: PCI ID 8086:2fa0 Jun 24 12:52:50 localhost.localdomain kernel: EDAC sbridge: Ver: 1.1.2 Jun 24 12:52:54 localhost.localdomain rngd[600]: [jitter]: Unable to obtain AES key, disabling JITTER source Jun 24 12:52:54 localhost.localdomain rngd[600]: [jitter]: Initialization Failed Jun 24 12:52:54 localhost.localdomain rngd[600]: Process privileges have been dropped to 2:2 Jun 24 12:52:55 localhost.localdomain dhclient[644]: Internet Systems Consortium DHCP Client 4.3.6 Jun 24 12:52:55 localhost.localdomain dhclient[644]: Copyright 2004-2017 Internet Systems Consortium. Jun 24 12:52:55 localhost.localdomain dhclient[644]: All rights reserved. 
Jun 24 12:52:55 localhost.localdomain dhclient[644]: For info, please visit https://www.isc.org/software/dhcp/ Jun 24 12:52:55 localhost.localdomain dhclient[644]: Jun 24 12:52:55 localhost.localdomain dhclient[644]: Listening on LPF/eth0/0e:81:d3:0c:8b:59 Jun 24 12:52:55 localhost.localdomain dhclient[644]: Sending on LPF/eth0/0e:81:d3:0c:8b:59 Jun 24 12:52:55 localhost.localdomain dhclient[644]: Sending on Socket/fallback Jun 24 12:52:55 localhost.localdomain dhclient[644]: Created duid "\000\004\224\243\3655)\307J\013\2320I\271+\373J\012". Jun 24 12:52:55 localhost.localdomain dhclient[644]: DHCPDISCOVER on eth0 to 255.255.255.255 port 67 interval 3 (xid=0xd416e065) Jun 24 12:52:55 localhost.localdomain dhclient[644]: DHCPREQUEST on eth0 to 255.255.255.255 port 67 (xid=0xd416e065) Jun 24 12:52:55 localhost.localdomain dhclient[644]: DHCPOFFER from 10.31.40.1 Jun 24 12:52:55 localhost.localdomain dhclient[644]: DHCPACK from 10.31.40.1 (xid=0xd416e065) Jun 24 12:52:55 localhost.localdomain dhclient[644]: bound to 10.31.43.41 -- renewal in 1677 seconds. Jun 24 12:52:55 localhost.localdomain dbus-daemon[599]: [system] Activating via systemd: service name='org.freedesktop.hostname1' unit='dbus-org.freedesktop.hostname1.service' requested by ':1.3' (uid=0 pid=659 comm="hostnamectl set-hostname ip-10-31-43-41.testing-fa" label="system_u:system_r:cloud_init_t:s0") Jun 24 12:52:55 localhost.localdomain systemd[1]: Starting Hostname Service... -- Subject: Unit systemd-hostnamed.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-hostnamed.service has begun starting up. Jun 24 12:52:55 localhost.localdomain dbus-daemon[599]: [system] Successfully activated service 'org.freedesktop.hostname1' Jun 24 12:52:55 localhost.localdomain systemd[1]: Started Hostname Service. -- Subject: Unit systemd-hostnamed.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-hostnamed.service has finished starting up. -- -- The start-up result is done. Jun 24 12:52:55 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd-hostnamed[660]: Changed static host name to 'ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com' Jun 24 12:52:55 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd-hostnamed[660]: Changed host name to 'ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com' Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[642]: Cloud-init v. 23.4-7.el8.2 running 'init-local' at Tue, 24 Jun 2025 16:52:54 +0000. Up 14.84 seconds. Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started Initial cloud-init job (pre-networking). -- Subject: Unit cloud-init-local.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit cloud-init-local.service has finished starting up. -- -- The start-up result is done. Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Reached target Network (Pre). -- Subject: Unit network-pre.target has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit network-pre.target has finished starting up. -- -- The start-up result is done. Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting Network Manager... 
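dhclient records the lease it obtained and the renewal interval in the lines above. A short sketch (the message text is copied from the log; the regex assumes nothing beyond that one line's shape) extracting the bound address and renewal time:

    import re

    # Lease line as dhclient[644] logged it above.
    line = "bound to 10.31.43.41 -- renewal in 1677 seconds."

    match = re.search(r"bound to (\S+) -- renewal in (\d+) seconds", line)
    if match:
        address, renewal = match.group(1), int(match.group(2))
        print(f"lease on {address}, renewal due in {renewal} s (about {renewal / 60:.0f} min)")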
-- Subject: Unit NetworkManager.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit NetworkManager.service has begun starting up. Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com NetworkManager[665]: [1750783976.1143] NetworkManager (version 1.40.16-15.el8) is starting... (boot:a5524a98-9c86-429d-b92b-6db6625b8203) Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com NetworkManager[665]: [1750783976.1168] Read config: /etc/NetworkManager/NetworkManager.conf (run: 15-carrier-timeout.conf) Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started Network Manager. -- Subject: Unit NetworkManager.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit NetworkManager.service has finished starting up. -- -- The start-up result is done. Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting Network Manager Wait Online... -- Subject: Unit NetworkManager-wait-online.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit NetworkManager-wait-online.service has begun starting up. Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Reached target Network. -- Subject: Unit network.target has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit network.target has finished starting up. -- -- The start-up result is done. Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting Dynamic System Tuning Daemon... -- Subject: Unit tuned.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit tuned.service has begun starting up. Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting GSSAPI Proxy Daemon... -- Subject: Unit gssproxy.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit gssproxy.service has begun starting up. Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com NetworkManager[665]: [1750783976.1256] bus-manager: acquired D-Bus service "org.freedesktop.NetworkManager" Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com NetworkManager[665]: [1750783976.1372] manager[0x563064c04080]: monitoring kernel firmware directory '/lib/firmware'. 
Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com NetworkManager[665]: [1750783976.1404] hostname: hostname: using hostnamed Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com NetworkManager[665]: [1750783976.1405] hostname: static hostname changed from (none) to "ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com" Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com NetworkManager[665]: [1750783976.1408] dns-mgr: init: dns=default,systemd-resolved rc-manager=symlink Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com NetworkManager[665]: [1750783976.1533] Loaded device plugin: NMTeamFactory (/usr/lib64/NetworkManager/1.40.16-15.el8/libnm-device-plugin-team.so) Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dbus-daemon[599]: [system] Activating via systemd: service name='org.freedesktop.nm_dispatcher' unit='dbus-org.freedesktop.nm-dispatcher.service' requested by ':1.5' (uid=0 pid=665 comm="/usr/sbin/NetworkManager --no-daemon " label="system_u:system_r:NetworkManager_t:s0") Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com NetworkManager[665]: [1750783976.1534] manager: rfkill: Wi-Fi enabled by radio killswitch; enabled by state file Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com NetworkManager[665]: [1750783976.1534] manager: rfkill: WWAN enabled by radio killswitch; enabled by state file Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com NetworkManager[665]: [1750783976.1535] manager: Networking is enabled by state file Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting Network Manager Script Dispatcher Service... -- Subject: Unit NetworkManager-dispatcher.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit NetworkManager-dispatcher.service has begun starting up. 
Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com NetworkManager[665]: [1750783976.1611] settings: Loaded settings plugin: ifcfg-rh ("/usr/lib64/NetworkManager/1.40.16-15.el8/libnm-settings-plugin-ifcfg-rh.so") Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com NetworkManager[665]: [1750783976.1612] settings: Loaded settings plugin: keyfile (internal) Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com NetworkManager[665]: [1750783976.1700] dhcp: init: Using DHCP client 'internal' Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com NetworkManager[665]: [1750783976.1700] device (lo): carrier: link connected Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com NetworkManager[665]: [1750783976.1703] manager: (lo): new Generic device (/org/freedesktop/NetworkManager/Devices/1) Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com NetworkManager[665]: [1750783976.1713] manager: (eth0): new Ethernet device (/org/freedesktop/NetworkManager/Devices/2) Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com NetworkManager[665]: [1750783976.1716] device (eth0): state change: unmanaged -> unavailable (reason 'managed', sys-iface-state: 'external') Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com kernel: IPv6: ADDRCONF(NETDEV_UP): eth0: link is not ready Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com NetworkManager[665]: [1750783976.1812] device (eth0): carrier: link connected Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com NetworkManager[665]: [1750783976.1816] device (eth0): state change: unavailable -> disconnected (reason 'carrier-changed', sys-iface-state: 'managed') Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started GSSAPI Proxy Daemon. -- Subject: Unit gssproxy.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit gssproxy.service has finished starting up. -- -- The start-up result is done. Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com NetworkManager[665]: [1750783976.1822] policy: auto-activating connection 'System eth0' (5fb06bd0-0bb0-7ffb-45f1-d6edd65f3e03) Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Reached target NFS client services. -- Subject: Unit nfs-client.target has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit nfs-client.target has finished starting up. -- -- The start-up result is done. Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Reached target Remote File Systems (Pre). -- Subject: Unit remote-fs-pre.target has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit remote-fs-pre.target has finished starting up. -- -- The start-up result is done. Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Reached target Remote File Systems. -- Subject: Unit remote-fs.target has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit remote-fs.target has finished starting up. -- -- The start-up result is done. 
Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com NetworkManager[665]: [1750783976.1828] device (eth0): Activation: starting connection 'System eth0' (5fb06bd0-0bb0-7ffb-45f1-d6edd65f3e03) Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com NetworkManager[665]: [1750783976.1829] device (eth0): state change: disconnected -> prepare (reason 'none', sys-iface-state: 'managed') Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com NetworkManager[665]: [1750783976.1832] manager: NetworkManager state is now CONNECTING Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com NetworkManager[665]: [1750783976.1834] device (eth0): state change: prepare -> config (reason 'none', sys-iface-state: 'managed') Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com NetworkManager[665]: [1750783976.1848] device (eth0): state change: config -> ip-config (reason 'none', sys-iface-state: 'managed') Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com NetworkManager[665]: [1750783976.1877] dhcp4 (eth0): activation: beginning transaction (timeout in 45 seconds) Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com NetworkManager[665]: [1750783976.1893] dhcp4 (eth0): state changed new lease, address=10.31.43.41 Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com NetworkManager[665]: [1750783976.1897] policy: set 'System eth0' (eth0) as default for IPv4 routing and DNS Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dbus-daemon[599]: [system] Activating via systemd: service name='org.freedesktop.resolve1' unit='dbus-org.freedesktop.resolve1.service' requested by ':1.5' (uid=0 pid=665 comm="/usr/sbin/NetworkManager --no-daemon " label="system_u:system_r:NetworkManager_t:s0") Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dbus-daemon[599]: [system] Activation via systemd failed for unit 'dbus-org.freedesktop.resolve1.service': Unit dbus-org.freedesktop.resolve1.service not found. Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dbus-daemon[599]: [system] Successfully activated service 'org.freedesktop.nm_dispatcher' Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started Network Manager Script Dispatcher Service. -- Subject: Unit NetworkManager-dispatcher.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit NetworkManager-dispatcher.service has finished starting up. -- -- The start-up result is done. Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com NetworkManager[665]: [1750783976.1965] device (eth0): state change: ip-config -> ip-check (reason 'none', sys-iface-state: 'managed') Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com NetworkManager[665]: [1750783976.2028] device (eth0): state change: ip-check -> secondaries (reason 'none', sys-iface-state: 'managed') Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com NetworkManager[665]: [1750783976.2029] device (eth0): state change: secondaries -> activated (reason 'none', sys-iface-state: 'managed') Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com NetworkManager[665]: [1750783976.2032] manager: NetworkManager state is now CONNECTED_SITE Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com NetworkManager[665]: [1750783976.2034] device (eth0): Activation: successful, device activated. 
Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com NetworkManager[665]: [1750783976.2038] manager: NetworkManager state is now CONNECTED_GLOBAL Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com NetworkManager[665]: [1750783976.2041] manager: startup complete Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started Network Manager Wait Online. -- Subject: Unit NetworkManager-wait-online.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit NetworkManager-wait-online.service has finished starting up. -- -- The start-up result is done. Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting Initial cloud-init job (metadata service crawler)... -- Subject: Unit cloud-init.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit cloud-init.service has begun starting up. Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com chronyd[606]: Added source 10.2.32.37 Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com chronyd[606]: Added source 10.2.32.38 Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com chronyd[606]: Added source 10.11.160.238 Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com chronyd[606]: Added source 10.18.100.10 Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dbus-daemon[599]: [system] Activating via systemd: service name='org.freedesktop.PolicyKit1' unit='polkit.service' requested by ':1.9' (uid=0 pid=669 comm="/usr/libexec/platform-python -Es /usr/sbin/tuned -" label="system_u:system_r:tuned_t:s0") Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting Authorization Manager... -- Subject: Unit polkit.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit polkit.service has begun starting up. Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: Cloud-init v. 23.4-7.el8.2 running 'init' at Tue, 24 Jun 2025 16:52:56 +0000. Up 16.67 seconds. Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: ci-info: ++++++++++++++++++++++++++++++++++++++Net device info+++++++++++++++++++++++++++++++++++++++ Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: ci-info: +--------+------+-----------------------------+---------------+--------+-------------------+ Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: ci-info: | Device | Up | Address | Mask | Scope | Hw-Address | Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: ci-info: +--------+------+-----------------------------+---------------+--------+-------------------+ Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: ci-info: | eth0 | True | 10.31.43.41 | 255.255.252.0 | global | 0e:81:d3:0c:8b:59 | Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: ci-info: | eth0 | True | fe80::c81:d3ff:fe0c:8b59/64 | . | link | 0e:81:d3:0c:8b:59 | Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: ci-info: | lo | True | 127.0.0.1 | 255.0.0.0 | host | . | Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: ci-info: | lo | True | ::1/128 | . | host | . 
| Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: ci-info: +--------+------+-----------------------------+---------------+--------+-------------------+ Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: ci-info: ++++++++++++++++++++++++++++Route IPv4 info+++++++++++++++++++++++++++++ Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: ci-info: +-------+-------------+------------+---------------+-----------+-------+ Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: ci-info: | Route | Destination | Gateway | Genmask | Interface | Flags | Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: ci-info: +-------+-------------+------------+---------------+-----------+-------+ Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: ci-info: | 0 | 0.0.0.0 | 10.31.40.1 | 0.0.0.0 | eth0 | UG | Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: ci-info: | 1 | 10.31.40.0 | 0.0.0.0 | 255.255.252.0 | eth0 | U | Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: ci-info: +-------+-------------+------------+---------------+-----------+-------+ Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: ci-info: +++++++++++++++++++Route IPv6 info+++++++++++++++++++ Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: ci-info: +-------+-------------+---------+-----------+-------+ Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: ci-info: | Route | Destination | Gateway | Interface | Flags | Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: ci-info: +-------+-------------+---------+-----------+-------+ Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: ci-info: | 1 | fe80::/64 | :: | eth0 | U | Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: ci-info: | 3 | multicast | :: | eth0 | U | Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: ci-info: +-------+-------------+---------+-----------+-------+ Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started Dynamic System Tuning Daemon. -- Subject: Unit tuned.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit tuned.service has finished starting up. -- -- The start-up result is done. Jun 24 12:52:56 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com polkitd[938]: Started polkitd version 0.115 Jun 24 12:52:57 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com polkitd[938]: Loading rules from directory /etc/polkit-1/rules.d Jun 24 12:52:57 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com polkitd[938]: Loading rules from directory /usr/share/polkit-1/rules.d Jun 24 12:52:57 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com polkitd[938]: Finished loading, compiling and executing 2 rules Jun 24 12:52:57 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dbus-daemon[599]: [system] Successfully activated service 'org.freedesktop.PolicyKit1' Jun 24 12:52:57 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started Authorization Manager. 
-- Subject: Unit polkit.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit polkit.service has finished starting up. -- -- The start-up result is done. Jun 24 12:52:57 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com polkitd[938]: Acquired the name org.freedesktop.PolicyKit1 on the system bus Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: Generating public/private rsa key pair. Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: Your identification has been saved in /etc/ssh/ssh_host_rsa_key. Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: Your public key has been saved in /etc/ssh/ssh_host_rsa_key.pub. Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: The key fingerprint is: Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: SHA256:+w0CxLMUm7O2MT3aDW2Fy+/0gO01JNhKSMh8dIwevDA root@ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: The key's randomart image is: Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: +---[RSA 3072]----+ Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: | ...o. | Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: | +E*+... | Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: | %+oo. . | Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: | o Oo+ = | Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: | B S B o . | Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: | . B B = o | Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: | o + = = o | Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: | o * + . | Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: | . + . | Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: +----[SHA256]-----+ Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: Generating public/private dsa key pair. Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: Your identification has been saved in /etc/ssh/ssh_host_dsa_key. Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: Your public key has been saved in /etc/ssh/ssh_host_dsa_key.pub. Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: The key fingerprint is: Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: SHA256:qHOQ/V+RzWYD/ouCE/nxV1BJhuMGSvvmPSY3Ru7j32w root@ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: The key's randomart image is: Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: +---[DSA 1024]----+ Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: | oo.| Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: | . . o...| Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: | . o + .. 
| Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: | o . o . B. | Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: | o o S.. = *. | Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: | o .o .o * ..| Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: | o . .+oo= .. | Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: | o o.o+.@.oE| Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: | ...O+*oo| Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: +----[SHA256]-----+ Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: Generating public/private ecdsa key pair. Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: Your identification has been saved in /etc/ssh/ssh_host_ecdsa_key. Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: Your public key has been saved in /etc/ssh/ssh_host_ecdsa_key.pub. Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: The key fingerprint is: Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: SHA256:eLvaZVrNf3+DyMvggFRhpjiypvVbvYR9xZi7HUmlewo root@ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: The key's randomart image is: Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: +---[ECDSA 256]---+ Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: | + | Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: | . + . | Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: | . o . . . | Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: | o . .. + o | Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: | o. .. S o = | Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: |o. .. .= . * o | Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: |. ..o.=.E B o | Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: | o ooOo* * .o| Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: | . ..=..o+ ..=| Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: +----[SHA256]-----+ Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: Generating public/private ed25519 key pair. Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: Your identification has been saved in /etc/ssh/ssh_host_ed25519_key. Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: Your public key has been saved in /etc/ssh/ssh_host_ed25519_key.pub. 
Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: The key fingerprint is: Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: SHA256:lT5ha7UudOaAkVDzJ8Fxf1O7AOWKp2XhOuBNnOwOD+o root@ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: The key's randomart image is: Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: +--[ED25519 256]--+ Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: | ..o.ooo. .| Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: | . +.+o. o| Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: | o B.oo.o.| Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: | o*+*o...o| Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: | .S*B*+ . | Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: | . =o*B | Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: | + *. o | Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: | . = .. | Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: | .E o | Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[780]: +----[SHA256]-----+ Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started Initial cloud-init job (metadata service crawler). -- Subject: Unit cloud-init.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit cloud-init.service has finished starting up. -- -- The start-up result is done. Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Reached target Cloud-config availability. -- Subject: Unit cloud-config.target has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit cloud-config.target has finished starting up. -- -- The start-up result is done. Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting OpenSSH server daemon... -- Subject: Unit sshd.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit sshd.service has begun starting up. Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Reached target Network is Online. -- Subject: Unit network-online.target has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit network-online.target has finished starting up. -- -- The start-up result is done. Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting Apply the settings specified in cloud-config... -- Subject: Unit cloud-config.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit cloud-config.service has begun starting up. Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting Crash recovery kernel arming... -- Subject: Unit kdump.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit kdump.service has begun starting up. 
Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting System Logging Service... -- Subject: Unit rsyslog.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit rsyslog.service has begun starting up. Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting The restraint harness.... -- Subject: Unit restraintd.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit restraintd.service has begun starting up. Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting Notify NFS peers of a restart... -- Subject: Unit rpc-statd-notify.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit rpc-statd-notify.service has begun starting up. Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com sshd[1022]: Server listening on 0.0.0.0 port 22. Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com sshd[1022]: Server listening on :: port 22. Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started OpenSSH server daemon. -- Subject: Unit sshd.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit sshd.service has finished starting up. -- -- The start-up result is done. Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com sm-notify[1027]: Version 2.3.3 starting Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started Notify NFS peers of a restart. -- Subject: Unit rpc-statd-notify.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit rpc-statd-notify.service has finished starting up. -- -- The start-up result is done. Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started The restraint harness.. -- Subject: Unit restraintd.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit restraintd.service has finished starting up. -- -- The start-up result is done. Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com rsyslogd[1025]: [origin software="rsyslogd" swVersion="8.2102.0-15.el8" x-pid="1025" x-info="https://www.rsyslog.com"] start Jun 24 12:53:00 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started System Logging Service. -- Subject: Unit rsyslog.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit rsyslog.service has finished starting up. -- -- The start-up result is done. Jun 24 12:53:01 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com rsyslogd[1025]: imjournal: journal files changed, reloading... [v8.2102.0-15.el8 try https://www.rsyslog.com/e/0 ] Jun 24 12:53:01 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com restraintd[1030]: Listening on http://localhost:8081 Jun 24 12:53:01 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com kdumpctl[1033]: kdump: Detected change(s) in the following file(s): /etc/fstab Jun 24 12:53:01 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[1270]: Cloud-init v. 23.4-7.el8.2 running 'modules:config' at Tue, 24 Jun 2025 16:53:01 +0000. Up 21.32 seconds. Jun 24 12:53:01 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started Apply the settings specified in cloud-config. 
-- Subject: Unit cloud-config.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit cloud-config.service has finished starting up. -- -- The start-up result is done. Jun 24 12:53:01 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting Execute cloud user/final scripts... -- Subject: Unit cloud-final.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit cloud-final.service has begun starting up. Jun 24 12:53:01 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting Permit User Sessions... -- Subject: Unit systemd-user-sessions.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-user-sessions.service has begun starting up. Jun 24 12:53:01 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started Permit User Sessions. -- Subject: Unit systemd-user-sessions.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-user-sessions.service has finished starting up. -- -- The start-up result is done. Jun 24 12:53:01 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting Terminate Plymouth Boot Screen... -- Subject: Unit plymouth-quit.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit plymouth-quit.service has begun starting up. Jun 24 12:53:01 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting Hold until boot process finishes up... -- Subject: Unit plymouth-quit-wait.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit plymouth-quit-wait.service has begun starting up. Jun 24 12:53:01 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started Command Scheduler. -- Subject: Unit crond.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit crond.service has finished starting up. -- -- The start-up result is done. Jun 24 12:53:01 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Received SIGRTMIN+21 from PID 339 (plymouthd). Jun 24 12:53:01 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started Terminate Plymouth Boot Screen. -- Subject: Unit plymouth-quit.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit plymouth-quit.service has finished starting up. -- -- The start-up result is done. Jun 24 12:53:01 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Received SIGRTMIN+21 from PID 339 (n/a). Jun 24 12:53:01 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started Hold until boot process finishes up. -- Subject: Unit plymouth-quit-wait.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit plymouth-quit-wait.service has finished starting up. -- -- The start-up result is done. Jun 24 12:53:01 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started Getty on tty1. -- Subject: Unit getty@tty1.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit getty@tty1.service has finished starting up. -- -- The start-up result is done. Jun 24 12:53:01 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Reached target Login Prompts. 
-- Subject: Unit getty.target has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit getty.target has finished starting up. -- -- The start-up result is done. Jun 24 12:53:01 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Reached target Multi-User System. -- Subject: Unit multi-user.target has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit multi-user.target has finished starting up. -- -- The start-up result is done. Jun 24 12:53:01 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting Update UTMP about System Runlevel Changes... -- Subject: Unit systemd-update-utmp-runlevel.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-update-utmp-runlevel.service has begun starting up. Jun 24 12:53:01 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: systemd-update-utmp-runlevel.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit systemd-update-utmp-runlevel.service has successfully entered the 'dead' state. Jun 24 12:53:01 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started Update UTMP about System Runlevel Changes. -- Subject: Unit systemd-update-utmp-runlevel.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-update-utmp-runlevel.service has finished starting up. -- -- The start-up result is done. Jun 24 12:53:01 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com crond[1278]: (CRON) STARTUP (1.5.2) Jun 24 12:53:01 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com crond[1278]: (CRON) INFO (Syslog will be used instead of sendmail.) Jun 24 12:53:01 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com crond[1278]: (CRON) INFO (RANDOM_DELAY will be scaled with factor 25% if used.) 
Jun 24 12:53:01 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com crond[1278]: (CRON) INFO (running with inotify support) Jun 24 12:53:01 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com kdumpctl[1033]: kdump: Rebuilding /boot/initramfs-4.18.0-553.5.1.el8.x86_64kdump.img Jun 24 12:53:02 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[1539]: ############################################################# Jun 24 12:53:02 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[1545]: -----BEGIN SSH HOST KEY FINGERPRINTS----- Jun 24 12:53:02 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[1549]: 1024 SHA256:qHOQ/V+RzWYD/ouCE/nxV1BJhuMGSvvmPSY3Ru7j32w root@ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com (DSA) Jun 24 12:53:02 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[1552]: 256 SHA256:eLvaZVrNf3+DyMvggFRhpjiypvVbvYR9xZi7HUmlewo root@ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com (ECDSA) Jun 24 12:53:02 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[1558]: 256 SHA256:lT5ha7UudOaAkVDzJ8Fxf1O7AOWKp2XhOuBNnOwOD+o root@ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com (ED25519) Jun 24 12:53:02 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[1562]: 3072 SHA256:+w0CxLMUm7O2MT3aDW2Fy+/0gO01JNhKSMh8dIwevDA root@ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com (RSA) Jun 24 12:53:02 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[1563]: -----END SSH HOST KEY FINGERPRINTS----- Jun 24 12:53:02 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[1564]: ############################################################# Jun 24 12:53:02 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[1462]: Cloud-init v. 23.4-7.el8.2 running 'modules:final' at Tue, 24 Jun 2025 16:53:02 +0000. Up 22.06 seconds. Jun 24 12:53:02 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com cloud-init[1462]: Cloud-init v. 23.4-7.el8.2 finished at Tue, 24 Jun 2025 16:53:02 +0000. Datasource DataSourceEc2Local. Up 22.26 seconds Jun 24 12:53:02 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1581]: dracut-049-233.git20240115.el8 Jun 24 12:53:02 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started Execute cloud user/final scripts. -- Subject: Unit cloud-final.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit cloud-final.service has finished starting up. -- -- The start-up result is done. Jun 24 12:53:02 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Reached target Cloud-init target. -- Subject: Unit cloud-init.target has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit cloud-init.target has finished starting up. -- -- The start-up result is done. 
Jun 24 12:53:02 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: Executing: /usr/bin/dracut --add kdumpbase --quiet --hostonly --hostonly-cmdline --hostonly-i18n --hostonly-mode strict --hostonly-nics -o "plymouth dash resume ifcfg earlykdump" --compress=xz --mount "/dev/disk/by-uuid/fe591198-9082-4b15-9b62-e83518524cd2 /sysroot xfs rw,relatime,seclabel,attr2,inode64,logbufs=8,logbsize=32k,noquota,nofail,x-systemd.before=initrd-fs.target" --no-hostonly-default-device -f /boot/initramfs-4.18.0-553.5.1.el8.x86_64kdump.img 4.18.0-553.5.1.el8.x86_64 Jun 24 12:53:02 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com chronyd[606]: Selected source 10.2.32.37 Jun 24 12:53:02 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com chronyd[606]: System clock TAI offset set to 37 seconds Jun 24 12:53:02 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: dracut module 'busybox' will not be installed, because command 'busybox' could not be found! Jun 24 12:53:02 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: dracut module 'ifcfg' will not be installed, because it's in the list to be omitted! Jun 24 12:53:02 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: dracut module 'plymouth' will not be installed, because it's in the list to be omitted! Jun 24 12:53:02 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: dracut module 'lvmmerge' will not be installed, because command 'lvm' could not be found! Jun 24 12:53:02 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: dracut module 'lvmthinpool-monitor' will not be installed, because command 'lvm' could not be found! Jun 24 12:53:02 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: dracut module 'btrfs' will not be installed, because command 'btrfs' could not be found! Jun 24 12:53:02 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: dracut module 'dmraid' will not be installed, because command 'dmraid' could not be found! Jun 24 12:53:02 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: dracut module 'lvm' will not be installed, because command 'lvm' could not be found! Jun 24 12:53:02 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: dracut module 'mdraid' will not be installed, because command 'mdadm' could not be found! Jun 24 12:53:02 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: dracut module 'cifs' will not be installed, because command 'mount.cifs' could not be found! Jun 24 12:53:02 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: dracut module 'iscsi' will not be installed, because command 'iscsi-iname' could not be found! Jun 24 12:53:02 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: dracut module 'iscsi' will not be installed, because command 'iscsiadm' could not be found! Jun 24 12:53:02 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: dracut module 'iscsi' will not be installed, because command 'iscsid' could not be found! Jun 24 12:53:02 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: dracut module 'nvmf' will not be installed, because command 'nvme' could not be found! Jun 24 12:53:03 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: dracut module 'resume' will not be installed, because it's in the list to be omitted! 
Jun 24 12:53:03 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: dracut module 'earlykdump' will not be installed, because it's in the list to be omitted! Jun 24 12:53:03 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: memstrack is available Jun 24 12:53:03 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: dracut module 'busybox' will not be installed, because command 'busybox' could not be found! Jun 24 12:53:03 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: dracut module 'lvmmerge' will not be installed, because command 'lvm' could not be found! Jun 24 12:53:03 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: dracut module 'lvmthinpool-monitor' will not be installed, because command 'lvm' could not be found! Jun 24 12:53:03 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: dracut module 'btrfs' will not be installed, because command 'btrfs' could not be found! Jun 24 12:53:03 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: dracut module 'dmraid' will not be installed, because command 'dmraid' could not be found! Jun 24 12:53:03 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: dracut module 'lvm' will not be installed, because command 'lvm' could not be found! Jun 24 12:53:03 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: dracut module 'mdraid' will not be installed, because command 'mdadm' could not be found! Jun 24 12:53:03 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: dracut module 'cifs' will not be installed, because command 'mount.cifs' could not be found! Jun 24 12:53:03 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: dracut module 'iscsi' will not be installed, because command 'iscsi-iname' could not be found! Jun 24 12:53:03 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: dracut module 'iscsi' will not be installed, because command 'iscsiadm' could not be found! Jun 24 12:53:03 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: dracut module 'iscsi' will not be installed, because command 'iscsid' could not be found! Jun 24 12:53:03 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: dracut module 'nvmf' will not be installed, because command 'nvme' could not be found! 
Jun 24 12:53:03 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: *** Including module: bash *** Jun 24 12:53:03 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: *** Including module: systemd *** Jun 24 12:53:03 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: *** Including module: systemd-initrd *** Jun 24 12:53:03 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: *** Including module: watchdog-modules *** Jun 24 12:53:03 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: *** Including module: nss-softokn *** Jun 24 12:53:03 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: *** Including module: rngd *** Jun 24 12:53:03 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: *** Including module: i18n *** Jun 24 12:53:04 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: *** Including module: drm *** Jun 24 12:53:04 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: *** Including module: prefixdevname *** Jun 24 12:53:04 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: *** Including module: kernel-modules *** Jun 24 12:53:04 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: *** Including module: kernel-modules-extra *** Jun 24 12:53:04 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: kernel-modules-extra: configuration source "/run/depmod.d/" does not exist Jun 24 12:53:04 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: kernel-modules-extra: configuration source "/lib/depmod.d/" does not exist Jun 24 12:53:04 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: kernel-modules-extra: parsing configuration file "/etc/depmod.d//dist.conf" Jun 24 12:53:04 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: kernel-modules-extra: /etc/depmod.d//dist.conf: added "updates extra built-in weak-updates" to the list of search directories Jun 24 12:53:04 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: *** Including module: fstab-sys *** Jun 24 12:53:04 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: *** Including module: rootfs-block *** Jun 24 12:53:04 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: *** Including module: terminfo *** Jun 24 12:53:04 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: *** Including module: udev-rules *** Jun 24 12:53:04 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: Skipping udev rule: 91-permissions.rules Jun 24 12:53:04 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: Skipping udev rule: 80-drivers-modprobe.rules Jun 24 12:53:04 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: *** Including module: biosdevname *** Jun 24 12:53:04 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: *** Including module: dracut-systemd *** Jun 24 12:53:04 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: *** Including module: usrmount *** Jun 24 12:53:04 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: *** Including module: base *** Jun 24 12:53:05 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: *** Including module: fs-lib *** Jun 24 12:53:05 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: *** Including module: kdumpbase *** Jun 24 12:53:05 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: *** Including module: 
memstrack *** Jun 24 12:53:05 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: *** Including module: microcode_ctl-fw_dir_override *** Jun 24 12:53:05 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: microcode_ctl module: mangling fw_dir Jun 24 12:53:05 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: microcode_ctl: reset fw_dir to "/lib/firmware/updates /lib/firmware" Jun 24 12:53:05 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel"... Jun 24 12:53:05 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: microcode_ctl: intel: caveats check for kernel version "4.18.0-553.5.1.el8.x86_64" passed, adding "/usr/share/microcode_ctl/ucode_with_caveats/intel" to fw_dir variable Jun 24 12:53:05 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-2d-07"... Jun 24 12:53:05 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: microcode_ctl: configuration "intel-06-2d-07" is ignored Jun 24 12:53:05 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-4e-03"... Jun 24 12:53:05 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: microcode_ctl: configuration "intel-06-4e-03" is ignored Jun 24 12:53:05 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-4f-01"... Jun 24 12:53:05 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: microcode_ctl: configuration "intel-06-4f-01" is ignored Jun 24 12:53:05 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-55-04"... Jun 24 12:53:05 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: microcode_ctl: configuration "intel-06-55-04" is ignored Jun 24 12:53:05 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-5e-03"... Jun 24 12:53:05 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: microcode_ctl: configuration "intel-06-5e-03" is ignored Jun 24 12:53:05 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-8c-01"... Jun 24 12:53:05 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: microcode_ctl: configuration "intel-06-8c-01" is ignored Jun 24 12:53:05 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-8e-9e-0x-0xca"... Jun 24 12:53:05 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: microcode_ctl: configuration "intel-06-8e-9e-0x-0xca" is ignored Jun 24 12:53:05 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-8e-9e-0x-dell"... 
Jun 24 12:53:05 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: microcode_ctl: configuration "intel-06-8e-9e-0x-dell" is ignored Jun 24 12:53:05 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: microcode_ctl: final fw_dir: "/usr/share/microcode_ctl/ucode_with_caveats/intel /lib/firmware/updates /lib/firmware" Jun 24 12:53:05 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: *** Including module: shutdown *** Jun 24 12:53:05 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: *** Including module: squash *** Jun 24 12:53:05 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: *** Including modules done *** Jun 24 12:53:05 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: *** Installing kernel module dependencies *** Jun 24 12:53:05 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: *** Installing kernel module dependencies done *** Jun 24 12:53:05 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: *** Resolving executable dependencies *** Jun 24 12:53:06 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: NetworkManager-dispatcher.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Jun 24 12:53:06 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: *** Resolving executable dependencies done*** Jun 24 12:53:06 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: *** Hardlinking files *** Jun 24 12:53:06 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: *** Hardlinking files done *** Jun 24 12:53:06 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: *** Generating early-microcode cpio image *** Jun 24 12:53:06 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: *** Constructing GenuineIntel.bin *** Jun 24 12:53:07 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: *** Constructing GenuineIntel.bin *** Jun 24 12:53:07 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: *** Store current command line parameters *** Jun 24 12:53:07 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: Stored kernel commandline: Jun 24 12:53:07 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: No dracut internal kernel commandline stored in the initramfs Jun 24 12:53:07 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: *** Install squash loader *** Jun 24 12:53:07 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: *** Stripping files *** Jun 24 12:53:07 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: *** Stripping files done *** Jun 24 12:53:07 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: *** Squashing the files inside the initramfs *** Jun 24 12:53:16 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: *** Squashing the files inside the initramfs done *** Jun 24 12:53:16 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: *** Creating image file '/boot/initramfs-4.18.0-553.5.1.el8.x86_64kdump.img' *** Jun 24 12:53:16 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dracut[1583]: *** Creating initramfs image file '/boot/initramfs-4.18.0-553.5.1.el8.x86_64kdump.img' done *** Jun 24 12:53:16 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com kdumpctl[1033]: kdump: kexec: 
loaded kdump kernel Jun 24 12:53:16 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com kdumpctl[1033]: kdump: Starting kdump: [OK] Jun 24 12:53:16 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started Crash recovery kernel arming. -- Subject: Unit kdump.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit kdump.service has finished starting up. -- -- The start-up result is done. Jun 24 12:53:16 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Startup finished in 532ms (kernel) + 4.274s (initrd) + 32.182s (userspace) = 36.989s. -- Subject: System start-up is now complete -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- All system services necessary queued for starting at boot have been -- started. Note that this does not mean that the machine is now idle as services -- might still be busy with completing start-up. -- -- Kernel start-up required 532427 microseconds. -- -- Initial RAM disk start-up required 4274689 microseconds. -- -- Userspace start-up required 32182577 microseconds. Jun 24 12:53:26 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: systemd-hostnamed.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit systemd-hostnamed.service has successfully entered the 'dead' state. Jun 24 12:54:07 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com chronyd[606]: Selected source 50.205.57.38 (2.centos.pool.ntp.org) Jun 24 12:54:31 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com sshd[5070]: Accepted publickey for root from 10.30.32.109 port 40374 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU Jun 24 12:54:31 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Created slice User Slice of UID 0. -- Subject: Unit user-0.slice has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user-0.slice has finished starting up. -- -- The start-up result is done. Jun 24 12:54:31 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting User runtime directory /run/user/0... -- Subject: Unit user-runtime-dir@0.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user-runtime-dir@0.service has begun starting up. Jun 24 12:54:31 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd-logind[602]: New session 1 of user root. -- Subject: A new session 1 has been created for user root -- Defined-By: systemd -- Support: https://access.redhat.com/support -- Documentation: https://www.freedesktop.org/wiki/Software/systemd/multiseat -- -- A new session with the ID 1 has been created for the user root. -- -- The leading process of the session is 5070. Jun 24 12:54:31 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started User runtime directory /run/user/0. -- Subject: Unit user-runtime-dir@0.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user-runtime-dir@0.service has finished starting up. -- -- The start-up result is done. Jun 24 12:54:31 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting User Manager for UID 0... -- Subject: Unit user@0.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user@0.service has begun starting up. 
Jun 24 12:54:31 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[5075]: pam_unix(systemd-user:session): session opened for user root by (uid=0) Jun 24 12:54:31 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[5075]: Reached target Timers. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jun 24 12:54:31 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[5075]: Starting D-Bus User Message Bus Socket. -- Subject: Unit UNIT has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has begun starting up. Jun 24 12:54:31 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[5075]: Reached target Paths. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jun 24 12:54:31 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[5075]: Listening on D-Bus User Message Bus Socket. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jun 24 12:54:31 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[5075]: Reached target Sockets. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jun 24 12:54:31 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[5075]: Reached target Basic System. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jun 24 12:54:31 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[5075]: Reached target Default. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Jun 24 12:54:31 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[5075]: Startup finished in 34ms. -- Subject: User manager start-up is now complete -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The user manager instance for user 0 has been started. All services queued -- for starting have been started. Note that other services might still be starting -- up or be started at any later time. -- -- Startup of the manager took 34370 microseconds. Jun 24 12:54:31 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started User Manager for UID 0. -- Subject: Unit user@0.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user@0.service has finished starting up. -- -- The start-up result is done. Jun 24 12:54:31 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started Session 1 of user root. -- Subject: Unit session-1.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit session-1.scope has finished starting up. -- -- The start-up result is done. 
Jun 24 12:54:31 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com sshd[5070]: pam_unix(sshd:session): session opened for user root by (uid=0) Jun 24 12:54:31 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com sshd[5084]: Received disconnect from 10.30.32.109 port 40374:11: disconnected by user Jun 24 12:54:31 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com sshd[5084]: Disconnected from user root 10.30.32.109 port 40374 Jun 24 12:54:31 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com sshd[5070]: pam_unix(sshd:session): session closed for user root Jun 24 12:54:31 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: session-1.scope: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit session-1.scope has successfully entered the 'dead' state. Jun 24 12:54:31 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd-logind[602]: Session 1 logged out. Waiting for processes to exit. Jun 24 12:54:31 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd-logind[602]: Removed session 1. -- Subject: Session 1 has been terminated -- Defined-By: systemd -- Support: https://access.redhat.com/support -- Documentation: https://www.freedesktop.org/wiki/Software/systemd/multiseat -- -- A session with the ID 1 has been terminated. Jun 24 12:54:39 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com sshd[5105]: Accepted publickey for root from 10.31.10.207 port 51402 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU Jun 24 12:54:39 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com sshd[5104]: Accepted publickey for root from 10.31.10.207 port 51392 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU Jun 24 12:54:39 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd-logind[602]: New session 3 of user root. -- Subject: A new session 3 has been created for user root -- Defined-By: systemd -- Support: https://access.redhat.com/support -- Documentation: https://www.freedesktop.org/wiki/Software/systemd/multiseat -- -- A new session with the ID 3 has been created for the user root. -- -- The leading process of the session is 5105. Jun 24 12:54:39 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started Session 3 of user root. -- Subject: Unit session-3.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit session-3.scope has finished starting up. -- -- The start-up result is done. Jun 24 12:54:39 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd-logind[602]: New session 4 of user root. -- Subject: A new session 4 has been created for user root -- Defined-By: systemd -- Support: https://access.redhat.com/support -- Documentation: https://www.freedesktop.org/wiki/Software/systemd/multiseat -- -- A new session with the ID 4 has been created for the user root. -- -- The leading process of the session is 5104. Jun 24 12:54:39 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started Session 4 of user root. -- Subject: Unit session-4.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit session-4.scope has finished starting up. -- -- The start-up result is done. 
Jun 24 12:54:39 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com sshd[5105]: pam_unix(sshd:session): session opened for user root by (uid=0) Jun 24 12:54:39 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com sshd[5104]: pam_unix(sshd:session): session opened for user root by (uid=0) Jun 24 12:54:39 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com sshd[5110]: Received disconnect from 10.31.10.207 port 51402:11: disconnected by user Jun 24 12:54:39 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com sshd[5110]: Disconnected from user root 10.31.10.207 port 51402 Jun 24 12:54:39 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com sshd[5105]: pam_unix(sshd:session): session closed for user root Jun 24 12:54:39 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: session-3.scope: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit session-3.scope has successfully entered the 'dead' state. Jun 24 12:54:39 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd-logind[602]: Session 3 logged out. Waiting for processes to exit. Jun 24 12:54:39 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd-logind[602]: Removed session 3. -- Subject: Session 3 has been terminated -- Defined-By: systemd -- Support: https://access.redhat.com/support -- Documentation: https://www.freedesktop.org/wiki/Software/systemd/multiseat -- -- A session with the ID 3 has been terminated. Jun 24 12:55:32 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com unknown: Running test '/Prepare-managed-node/tests/prep_managed_node' (serial number 1) with reboot count 0 and test restart count 0. (Be aware the test name is sanitized!) Jun 24 12:55:32 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dbus-daemon[599]: [system] Activating via systemd: service name='org.freedesktop.hostname1' unit='dbus-org.freedesktop.hostname1.service' requested by ':1.24' (uid=0 pid=6710 comm="hostnamectl set-hostname managed-node3 " label="unconfined_u:unconfined_r:unconfined_t:s0-s0:c0.c1023") Jun 24 12:55:32 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Starting Hostname Service... -- Subject: Unit systemd-hostnamed.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-hostnamed.service has begun starting up. Jun 24 12:55:33 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com dbus-daemon[599]: [system] Successfully activated service 'org.freedesktop.hostname1' Jun 24 12:55:33 ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com systemd[1]: Started Hostname Service. -- Subject: Unit systemd-hostnamed.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-hostnamed.service has finished starting up. -- -- The start-up result is done. 
Jun 24 12:55:33 managed-node3 systemd-hostnamed[6711]: Changed static host name to 'managed-node3' Jun 24 12:55:33 managed-node3 systemd-hostnamed[6711]: Changed host name to 'managed-node3' Jun 24 12:55:33 managed-node3 NetworkManager[665]: [1750784133.0662] hostname: static hostname changed from "ip-10-31-43-41.testing-farm.us-east-1.aws.redhat.com" to "managed-node3" Jun 24 12:55:33 managed-node3 dbus-daemon[599]: [system] Activating via systemd: service name='org.freedesktop.nm_dispatcher' unit='dbus-org.freedesktop.nm-dispatcher.service' requested by ':1.5' (uid=0 pid=665 comm="/usr/sbin/NetworkManager --no-daemon " label="system_u:system_r:NetworkManager_t:s0") Jun 24 12:55:33 managed-node3 systemd[1]: Starting Network Manager Script Dispatcher Service... -- Subject: Unit NetworkManager-dispatcher.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit NetworkManager-dispatcher.service has begun starting up. Jun 24 12:55:33 managed-node3 NetworkManager[665]: [1750784133.0797] policy: set-hostname: set hostname to 'managed-node3' (from system configuration) Jun 24 12:55:33 managed-node3 dbus-daemon[599]: [system] Successfully activated service 'org.freedesktop.nm_dispatcher' Jun 24 12:55:33 managed-node3 systemd[1]: Started Network Manager Script Dispatcher Service. -- Subject: Unit NetworkManager-dispatcher.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit NetworkManager-dispatcher.service has finished starting up. -- -- The start-up result is done. Jun 24 12:55:33 managed-node3 unknown: Leaving test '/Prepare-managed-node/tests/prep_managed_node' (serial number 1). (Be aware the test name is sanitized!) Jun 24 12:55:43 managed-node3 systemd[1]: NetworkManager-dispatcher.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Jun 24 12:56:03 managed-node3 systemd[1]: systemd-hostnamed.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit systemd-hostnamed.service has successfully entered the 'dead' state. Jun 24 12:58:09 managed-node3 sshd[7289]: Accepted publickey for root from 10.31.11.118 port 59024 ssh2: RSA SHA256:9j1blwt3wcrRiGYZQ7ZGu9axm3cDklH6/z4c+Ee8CzE Jun 24 12:58:09 managed-node3 systemd-logind[602]: New session 5 of user root. -- Subject: A new session 5 has been created for user root -- Defined-By: systemd -- Support: https://access.redhat.com/support -- Documentation: https://www.freedesktop.org/wiki/Software/systemd/multiseat -- -- A new session with the ID 5 has been created for the user root. -- -- The leading process of the session is 7289. Jun 24 12:58:09 managed-node3 systemd[1]: Started Session 5 of user root. -- Subject: Unit session-5.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit session-5.scope has finished starting up. -- -- The start-up result is done. 
Jun 24 12:58:09 managed-node3 sshd[7289]: pam_unix(sshd:session): session opened for user root by (uid=0) Jun 24 12:58:10 managed-node3 platform-python[7434]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Jun 24 12:58:11 managed-node3 platform-python[7586]: ansible-service_facts Invoked Jun 24 12:58:13 managed-node3 platform-python[7795]: ansible-ansible.legacy.command Invoked with _raw_params=systemctl is-system-running _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 24 12:58:14 managed-node3 platform-python[7919]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jun 24 12:58:14 managed-node3 platform-python[8042]: ansible-ansible.legacy.command Invoked with _raw_params=systemctl is-system-running _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 24 12:58:15 managed-node3 platform-python[8166]: ansible-ansible.legacy.dnf Invoked with name=['pcp', 'pcp-zeroconf'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jun 24 12:58:51 managed-node3 kernel: SELinux: Converting 363 SID table entries... Jun 24 12:58:51 managed-node3 kernel: SELinux: policy capability network_peer_controls=1 Jun 24 12:58:51 managed-node3 kernel: SELinux: policy capability open_perms=1 Jun 24 12:58:51 managed-node3 kernel: SELinux: policy capability extended_socket_class=1 Jun 24 12:58:51 managed-node3 kernel: SELinux: policy capability always_check_network=0 Jun 24 12:58:51 managed-node3 kernel: SELinux: policy capability cgroup_seclabel=1 Jun 24 12:58:51 managed-node3 kernel: SELinux: policy capability nnp_nosuid_transition=1 Jun 24 12:58:51 managed-node3 dbus-daemon[599]: [system] Reloaded configuration Jun 24 12:58:52 managed-node3 groupadd[8926]: group added to /etc/group: name=pcp, GID=991 Jun 24 12:58:52 managed-node3 groupadd[8926]: group added to /etc/gshadow: name=pcp Jun 24 12:58:52 managed-node3 groupadd[8926]: new group: name=pcp, GID=991 Jun 24 12:58:52 managed-node3 useradd[8933]: new user: name=pcp, UID=994, GID=991, home=/var/lib/pcp, shell=/sbin/nologin Jun 24 12:58:53 managed-node3 systemd[1]: Starting Performance Metrics Collector Daemon... -- Subject: Unit pmcd.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmcd.service has begun starting up. Jun 24 12:58:54 managed-node3 systemd[1]: Started Performance Metrics Collector Daemon. -- Subject: Unit pmcd.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmcd.service has finished starting up. -- -- The start-up result is done. Jun 24 12:58:54 managed-node3 systemd[1]: Starting Performance Metrics Archive Logger... 
-- Subject: Unit pmlogger.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger.service has begun starting up. Jun 24 12:58:54 managed-node3 systemd[1]: Starting Performance Metrics Inference Engine... -- Subject: Unit pmie.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmie.service has begun starting up. Jun 24 12:58:54 managed-node3 pmlogger[9221]: /usr/libexec/pcp/lib/pmlogger: Warning: Performance Co-Pilot archive logger(s) not permanently enabled. Jun 24 12:58:54 managed-node3 pmcd[9386]: Installing dm PMDA ... Jun 24 12:58:54 managed-node3 pmlogger[9221]: To enable pmlogger, run the following as root: Jun 24 12:58:54 managed-node3 pmlogger[9221]: # /usr/bin/systemctl enable pmlogger.service Jun 24 12:58:54 managed-node3 pmie[9222]: /usr/libexec/pcp/lib/pmie: Warning: Performance Co-Pilot Inference Engine (pmie) not permanently enabled. Jun 24 12:58:54 managed-node3 pmie[9222]: To enable pmie, run the following as root: Jun 24 12:58:54 managed-node3 pmie[9222]: # /usr/bin/systemctl enable pmie.service Jun 24 12:58:54 managed-node3 systemd[1]: Started Performance Metrics Inference Engine. -- Subject: Unit pmie.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmie.service has finished starting up. -- -- The start-up result is done. Jun 24 12:58:54 managed-node3 systemd[1]: Started Half-hourly check of PMIE instances. -- Subject: Unit pmie_check.timer has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmie_check.timer has finished starting up. -- -- The start-up result is done. Jun 24 12:58:54 managed-node3 systemd[1]: Started Daily processing of PMIE logs. -- Subject: Unit pmie_daily.timer has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmie_daily.timer has finished starting up. -- -- The start-up result is done. Jun 24 12:58:54 managed-node3 systemd[1]: Starting pmie farm service... -- Subject: Unit pmie_farm.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmie_farm.service has begun starting up. Jun 24 12:58:54 managed-node3 systemd[1]: Starting Check PMIE instances are running... -- Subject: Unit pmie_check.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmie_check.service has begun starting up. Jun 24 12:58:54 managed-node3 systemd[1]: Started pmie farm service. -- Subject: Unit pmie_farm.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmie_farm.service has finished starting up. -- -- The start-up result is done. Jun 24 12:58:54 managed-node3 systemd[1]: Started Half-hourly check of pmie farm instances. -- Subject: Unit pmie_farm_check.timer has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmie_farm_check.timer has finished starting up. -- -- The start-up result is done. Jun 24 12:58:54 managed-node3 systemd[1]: Starting Check and migrate non-primary pmie farm instances... -- Subject: Unit pmie_farm_check.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmie_farm_check.service has begun starting up. Jun 24 12:58:54 managed-node3 systemd[1]: Started Check PMIE instances are running. 
-- Subject: Unit pmie_check.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmie_check.service has finished starting up. -- -- The start-up result is done. Jun 24 12:58:54 managed-node3 systemd[1]: Started Check and migrate non-primary pmie farm instances. -- Subject: Unit pmie_farm_check.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmie_farm_check.service has finished starting up. -- -- The start-up result is done. Jun 24 12:58:54 managed-node3 systemd[1]: pmie_farm_check.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit pmie_farm_check.service has successfully entered the 'dead' state. Jun 24 12:58:54 managed-node3 systemd[1]: pmie_check.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit pmie_check.service has successfully entered the 'dead' state. Jun 24 12:58:55 managed-node3 systemd[1]: Started Performance Metrics Archive Logger. -- Subject: Unit pmlogger.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger.service has finished starting up. -- -- The start-up result is done. Jun 24 12:58:55 managed-node3 systemd[1]: Started Daily processing of archive logs. -- Subject: Unit pmlogger_daily.timer has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger_daily.timer has finished starting up. -- -- The start-up result is done. Jun 24 12:58:55 managed-node3 systemd[1]: Started Half-hourly check of pmlogger instances. -- Subject: Unit pmlogger_check.timer has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger_check.timer has finished starting up. -- -- The start-up result is done. Jun 24 12:58:55 managed-node3 systemd[1]: Starting pmlogger farm service... -- Subject: Unit pmlogger_farm.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger_farm.service has begun starting up. Jun 24 12:58:55 managed-node3 systemd[1]: Starting Check pmlogger instances are running... -- Subject: Unit pmlogger_check.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger_check.service has begun starting up. Jun 24 12:58:55 managed-node3 systemd[1]: Started pmlogger farm service. -- Subject: Unit pmlogger_farm.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger_farm.service has finished starting up. -- -- The start-up result is done. Jun 24 12:58:55 managed-node3 systemd[1]: Started Half-hourly check of pmlogger farm instances. -- Subject: Unit pmlogger_farm_check.timer has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger_farm_check.timer has finished starting up. -- -- The start-up result is done. Jun 24 12:58:55 managed-node3 systemd[1]: Starting Check and migrate non-primary pmlogger farm instances... -- Subject: Unit pmlogger_farm_check.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger_farm_check.service has begun starting up. Jun 24 12:58:55 managed-node3 systemd[1]: Reloading. 
Jun 24 12:58:55 managed-node3 systemd[1]: Started Check pmlogger instances are running. -- Subject: Unit pmlogger_check.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger_check.service has finished starting up. -- -- The start-up result is done. Jun 24 12:58:55 managed-node3 systemd[1]: Started Check and migrate non-primary pmlogger farm instances. -- Subject: Unit pmlogger_farm_check.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger_farm_check.service has finished starting up. -- -- The start-up result is done. Jun 24 12:58:55 managed-node3 systemd[1]: Started /usr/bin/systemctl start man-db-cache-update. -- Subject: Unit run-r5dd6c7c4a34d41b9961e4c773fbb1ee9.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit run-r5dd6c7c4a34d41b9961e4c773fbb1ee9.service has finished starting up. -- -- The start-up result is done. Jun 24 12:58:55 managed-node3 systemd[1]: Reloading. Jun 24 12:58:56 managed-node3 systemd[1]: pmlogger_farm_check.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit pmlogger_farm_check.service has successfully entered the 'dead' state. Jun 24 12:58:56 managed-node3 systemd[1]: cgroup compatibility translation between legacy and unified hierarchy settings activated. See cgroup-compat debug messages for details. Jun 24 12:58:56 managed-node3 systemd[1]: Starting man-db-cache-update.service... -- Subject: Unit man-db-cache-update.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit man-db-cache-update.service has begun starting up. Jun 24 12:58:56 managed-node3 systemd[1]: pmlogger_check.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit pmlogger_check.service has successfully entered the 'dead' state. Jun 24 12:58:57 managed-node3 platform-python[12041]: ansible-ansible.legacy.dnf Invoked with name=['cyrus-sasl-lib', 'cyrus-sasl-scram'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jun 24 12:58:59 managed-node3 systemd[1]: man-db-cache-update.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit man-db-cache-update.service has successfully entered the 'dead' state. Jun 24 12:58:59 managed-node3 systemd[1]: Started man-db-cache-update.service. -- Subject: Unit man-db-cache-update.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit man-db-cache-update.service has finished starting up. -- -- The start-up result is done. Jun 24 12:58:59 managed-node3 systemd[1]: run-r5dd6c7c4a34d41b9961e4c773fbb1ee9.service: Succeeded. 
-- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit run-r5dd6c7c4a34d41b9961e4c773fbb1ee9.service has successfully entered the 'dead' state. Jun 24 12:58:59 managed-node3 kernel: device-mapper: uevent: version 1.0.3 Jun 24 12:58:59 managed-node3 kernel: device-mapper: ioctl: 4.46.0-ioctl (2022-02-22) initialised: dm-devel@redhat.com Jun 24 12:59:01 managed-node3 platform-python[14671]: ansible-ansible.legacy.command Invoked with _raw_params=cat /etc/pcp/pmcd/pmcd.conf _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 24 12:59:01 managed-node3 pmcd[14705]: Installing nfsclient PMDA ... Jun 24 12:59:02 managed-node3 platform-python[14933]: ansible-file Invoked with path=/etc/pcp/labels state=directory mode=0755 owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:02 managed-node3 platform-python[15073]: ansible-file Invoked with path=/etc/pcp/labels/optional state=directory mode=0755 owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:03 managed-node3 platform-python[15196]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/labels/ansible-managed follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Jun 24 12:59:03 managed-node3 platform-python[15386]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1750784342.934954-9175-71720048783422/source dest=/etc/pcp/labels/ansible-managed mode=0644 follow=False _original_basename=pmcd.explicit.labels.j2 checksum=5f36b2ea290645ee34d943220a14b54ee5ea5be5 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:04 managed-node3 platform-python[15515]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/labels/optional/ansible-managed follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Jun 24 12:59:04 managed-node3 platform-python[15614]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1750784343.8109293-9211-81348794928374/source dest=/etc/pcp/labels/optional/ansible-managed mode=0644 follow=False _original_basename=pmcd.implicit.labels.j2 checksum=5f36b2ea290645ee34d943220a14b54ee5ea5be5 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:04 managed-node3 platform-python[15744]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmcd follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Jun 24 12:59:05 managed-node3 platform-python[15845]: ansible-ansible.legacy.copy Invoked with 
src=/root/.ansible/tmp/ansible-tmp-1750784344.5356944-9248-34468326116810/source dest=/etc/sysconfig/pmcd mode=0644 follow=False _original_basename=pmcd.defaults.j2 checksum=7518789c091387cd9c322e1a8fa8aad21d4efbd3 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:05 managed-node3 pmcd[15983]: Installing openmetrics PMDA ... Jun 24 12:59:05 managed-node3 platform-python[15970]: ansible-user Invoked with name=metrics system=True state=present non_unique=False force=False remove=False create_home=True move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node3 update_password=always uid=None group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None Jun 24 12:59:05 managed-node3 useradd[16016]: new group: name=metrics, GID=990 Jun 24 12:59:05 managed-node3 useradd[16016]: new user: name=metrics, UID=993, GID=990, home=/home/metrics, shell=/bin/bash Jun 24 12:59:06 managed-node3 pmcd[16027]: [Tue Jun 24 12:59:06] pmdaopenmetrics(16027) Info: Initializing ... currently in notready state. Jun 24 12:59:06 managed-node3 pmcd[16027]: [Tue Jun 24 12:59:06] pmdaopenmetrics(16027) Info: Config change detected, traversed 1 config entries in 0.0001s, rescanning ... Jun 24 12:59:06 managed-node3 pmcd[16027]: [Tue Jun 24 12:59:06] pmdaopenmetrics(16027) Info: Found source grafana cluster 1 Jun 24 12:59:06 managed-node3 pmcd[16027]: [Tue Jun 24 12:59:06] pmdaopenmetrics(16027) Info: Ready to process requests Jun 24 12:59:06 managed-node3 pmcd[16097]: [Tue Jun 24 12:59:06] pmdaopenmetrics(16097) Info: Initializing ... currently in notready state. Jun 24 12:59:06 managed-node3 pmcd[16097]: [Tue Jun 24 12:59:06] pmdaopenmetrics(16097) Info: Config change detected, traversed 1 config entries in 0.0001s, rescanning ... Jun 24 12:59:06 managed-node3 pmcd[16097]: [Tue Jun 24 12:59:06] pmdaopenmetrics(16097) Info: Found source grafana cluster 1 Jun 24 12:59:06 managed-node3 pmcd[16097]: [Tue Jun 24 12:59:06] pmdaopenmetrics(16097) Info: Ready to process requests Jun 24 12:59:06 managed-node3 platform-python[16154]: ansible-ansible.legacy.command Invoked with _raw_params=set -eu if set -o | grep -q pipefail; then set -o pipefail # pipefail not supported on debian, some ubuntu fi if ! 
sasldblistusers2 -f "/etc/pcp/passwd.db" | grep -q "^metrics@"; then echo "Creating new metrics user in /etc/pcp/passwd.db" echo "metrics" | saslpasswd2 -a pmcd "metrics" chown root:pcp "/etc/pcp/passwd.db" chmod 640 "/etc/pcp/passwd.db" fi _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 24 12:59:06 managed-node3 sasldblistusers2[16208]: _sasldb_getkeyhandle has failed Jun 24 12:59:06 managed-node3 saslpasswd2[16219]: error deleting entry from sasldb: BDB0073 DB_NOTFOUND: No matching key/data pair found Jun 24 12:59:06 managed-node3 saslpasswd2[16219]: error deleting entry from sasldb: BDB0073 DB_NOTFOUND: No matching key/data pair found Jun 24 12:59:06 managed-node3 saslpasswd2[16219]: error deleting entry from sasldb: BDB0073 DB_NOTFOUND: No matching key/data pair found Jun 24 12:59:06 managed-node3 platform-python[16372]: ansible-ansible.legacy.stat Invoked with path=/etc/sasl2/pmcd.conf follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Jun 24 12:59:07 managed-node3 platform-python[16473]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1750784346.5479815-9341-134487052617880/source dest=/etc/sasl2/pmcd.conf mode=0644 follow=False _original_basename=pmcd.sasl2.conf.j2 checksum=615d2de55ab86108da0c7e6b64988fecb4169771 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:08 managed-node3 platform-python[16694]: ansible-ansible.legacy.systemd Invoked with name=pmcd state=restarted enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jun 24 12:59:08 managed-node3 systemd[1]: Stopping Performance Metrics Collector Daemon... -- Subject: Unit pmcd.service has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmcd.service has begun shutting down. Jun 24 12:59:08 managed-node3 pmcd[15984]: Terminated Jun 24 12:59:08 managed-node3 pmcd[9254]: _pmda_setup: Interrupted! Jun 24 12:59:08 managed-node3 pmcd[9254]: _pmda_setup_cleanup: reset .NeedInstall for openmetrics PMDA Jun 24 12:59:08 managed-node3 systemd[1]: pmcd.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit pmcd.service has successfully entered the 'dead' state. Jun 24 12:59:08 managed-node3 systemd[1]: Stopped Performance Metrics Collector Daemon. -- Subject: Unit pmcd.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmcd.service has finished shutting down. Jun 24 12:59:08 managed-node3 systemd[1]: Starting Performance Metrics Collector Daemon... -- Subject: Unit pmcd.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmcd.service has begun starting up. Jun 24 12:59:09 managed-node3 systemd[1]: Started Performance Metrics Collector Daemon. -- Subject: Unit pmcd.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmcd.service has finished starting up. -- -- The start-up result is done. Jun 24 12:59:09 managed-node3 pmcd[17258]: Installing openmetrics PMDA ... 
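[editor's note] For readability, the ansible-ansible.legacy.command task logged at 12:59:06 above runs the shell snippet below; it is re-indented here from the flattened _raw_params in the journal entry, with the content itself unchanged. It provisions a "metrics" user in the pmcd SASL database only when one is not already present, which is why the sasldblistusers2/saslpasswd2 messages follow it:

    set -eu
    if set -o | grep -q pipefail; then
        set -o pipefail  # pipefail not supported on debian, some ubuntu
    fi
    # create the SASL user only if it does not already exist in the pmcd database
    if ! sasldblistusers2 -f "/etc/pcp/passwd.db" | grep -q "^metrics@"; then
        echo "Creating new metrics user in /etc/pcp/passwd.db"
        echo "metrics" | saslpasswd2 -a pmcd "metrics"
        chown root:pcp "/etc/pcp/passwd.db"
        chmod 640 "/etc/pcp/passwd.db"
    fi

The "error deleting entry from sasldb: BDB0073 DB_NOTFOUND" messages logged immediately after this command are typically benign noise from saslpasswd2 when the entry did not previously exist, not an indication that the task failed.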
Jun 24 12:59:09 managed-node3 pmcd[17312]: [Tue Jun 24 12:59:09] pmdaopenmetrics(17312) Info: Initializing ... currently in notready state. Jun 24 12:59:09 managed-node3 pmcd[17312]: [Tue Jun 24 12:59:09] pmdaopenmetrics(17312) Info: Config change detected, traversed 1 config entries in 0.0001s, rescanning ... Jun 24 12:59:09 managed-node3 pmcd[17312]: [Tue Jun 24 12:59:09] pmdaopenmetrics(17312) Info: Found source grafana cluster 1 Jun 24 12:59:09 managed-node3 pmcd[17312]: [Tue Jun 24 12:59:09] pmdaopenmetrics(17312) Info: Ready to process requests Jun 24 12:59:09 managed-node3 platform-python[17399]: ansible-file Invoked with path=/etc/pcp/pmieconf/network state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:09 managed-node3 pmcd[17400]: [Tue Jun 24 12:59:09] pmdaopenmetrics(17400) Info: Initializing ... currently in notready state. Jun 24 12:59:09 managed-node3 pmcd[17400]: [Tue Jun 24 12:59:09] pmdaopenmetrics(17400) Info: Config change detected, traversed 1 config entries in 0.0001s, rescanning ... Jun 24 12:59:09 managed-node3 pmcd[17400]: [Tue Jun 24 12:59:09] pmdaopenmetrics(17400) Info: Found source grafana cluster 1 Jun 24 12:59:09 managed-node3 pmcd[17400]: [Tue Jun 24 12:59:09] pmdaopenmetrics(17400) Info: Ready to process requests Jun 24 12:59:10 managed-node3 platform-python[17644]: ansible-file Invoked with path=/etc/pcp/pmieconf/power state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:10 managed-node3 platform-python[17767]: ansible-file Invoked with path=/etc/pcp/pmieconf/zeroconf state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:10 managed-node3 platform-python[17890]: ansible-file Invoked with path=/etc/pcp/pmieconf/filesys state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:11 managed-node3 platform-python[18070]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/network state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:11 managed-node3 platform-python[18197]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/power state=directory owner=root group=root mode=0755 recurse=False force=False follow=True 
modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:11 managed-node3 platform-python[18320]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/zeroconf state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:12 managed-node3 platform-python[18443]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/filesys state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:12 managed-node3 platform-python[18651]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/network/tcplistenoverflows follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Jun 24 12:59:13 managed-node3 platform-python[18752]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1750784352.5445395-9638-163695761326552/source dest=/etc/pcp/pmieconf/network/tcplistenoverflows owner=root group=root mode=0644 _original_basename=tcplistenoverflows follow=False checksum=608d8a6ac6ee33bb86b77d28ba24fbcd378db43d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:13 managed-node3 platform-python[18877]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/network/tcpqfulldocookies follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Jun 24 12:59:13 managed-node3 platform-python[18983]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1750784353.2219875-9638-115977414222571/source dest=/etc/pcp/pmieconf/network/tcpqfulldocookies owner=root group=root mode=0644 _original_basename=tcpqfulldocookies follow=False checksum=3256a5c2e8d07a20d8e97a08c0ab163252b0beae backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:14 managed-node3 platform-python[19108]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/network/tcpqfulldrops follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Jun 24 12:59:14 managed-node3 platform-python[19209]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1750784353.8862958-9638-88021635534649/source dest=/etc/pcp/pmieconf/network/tcpqfulldrops owner=root group=root mode=0644 _original_basename=tcpqfulldrops follow=False checksum=37b2bd7f2430bd9678ab078c5e69a53bea556524 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:14 
managed-node3 platform-python[19345]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/power/thermal_throttle follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Jun 24 12:59:15 managed-node3 platform-python[19409]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/power/thermal_throttle _original_basename=thermal_throttle recurse=False state=file path=/etc/pcp/pmieconf/power/thermal_throttle force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:15 managed-node3 platform-python[19532]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/zeroconf/all_threads follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Jun 24 12:59:15 managed-node3 platform-python[19633]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1750784355.1838043-9638-246766652983140/source dest=/etc/pcp/pmieconf/zeroconf/all_threads owner=root group=root mode=0644 _original_basename=all_threads follow=False checksum=65169db16dcaa224c211373001adc3addf1031c4 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:16 managed-node3 platform-python[19758]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/filesys/vfs_files follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Jun 24 12:59:16 managed-node3 platform-python[19857]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1750784355.8720796-9638-98945739888101/source dest=/etc/pcp/pmieconf/filesys/vfs_files owner=root group=root mode=0644 _original_basename=vfs_files follow=False checksum=cd5d85dfb8eebd7d9737d56e78bd969dafa3999c backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:17 managed-node3 platform-python[19982]: ansible-lineinfile Invoked with state=absent path=/var/lib/pcp/config/pmie/config.default regexp=//.*global webhook_endpoint = "" backrefs=False create=False backup=False firstmatch=False unsafe_writes=False search_string=None line=None insertafter=None insertbefore=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:17 managed-node3 platform-python[20105]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcplistenoverflows dest=/var/lib/pcp/config/pmieconf/network/tcplistenoverflows state=link force=True path=/var/lib/pcp/config/pmieconf/network/tcplistenoverflows recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:17 managed-node3 platform-python[20228]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcpqfulldocookies dest=/var/lib/pcp/config/pmieconf/network/tcpqfulldocookies state=link force=True 
path=/var/lib/pcp/config/pmieconf/network/tcpqfulldocookies recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:18 managed-node3 platform-python[20351]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcpqfulldrops dest=/var/lib/pcp/config/pmieconf/network/tcpqfulldrops state=link force=True path=/var/lib/pcp/config/pmieconf/network/tcpqfulldrops recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:18 managed-node3 platform-python[20474]: ansible-file Invoked with src=/etc/pcp/pmieconf/power/thermal_throttle dest=/var/lib/pcp/config/pmieconf/power/thermal_throttle state=link force=True path=/var/lib/pcp/config/pmieconf/power/thermal_throttle recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:19 managed-node3 platform-python[20597]: ansible-file Invoked with src=/etc/pcp/pmieconf/zeroconf/all_threads dest=/var/lib/pcp/config/pmieconf/zeroconf/all_threads state=link force=True path=/var/lib/pcp/config/pmieconf/zeroconf/all_threads recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:19 managed-node3 platform-python[20720]: ansible-file Invoked with src=/etc/pcp/pmieconf/filesys/vfs_files dest=/var/lib/pcp/config/pmieconf/filesys/vfs_files state=link force=True path=/var/lib/pcp/config/pmieconf/filesys/vfs_files recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:20 managed-node3 platform-python[20843]: ansible-ansible.legacy.systemd Invoked with name=pmie state=restarted enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jun 24 12:59:20 managed-node3 systemd[1]: Stopping pmie farm service... -- Subject: Unit pmie_farm.service has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmie_farm.service has begun shutting down. Jun 24 12:59:20 managed-node3 systemd[1]: pmie_farm.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit pmie_farm.service has successfully entered the 'dead' state. Jun 24 12:59:20 managed-node3 systemd[1]: Stopped pmie farm service. -- Subject: Unit pmie_farm.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmie_farm.service has finished shutting down. 
Jun 24 12:59:20 managed-node3 systemd[1]: Stopping Performance Metrics Inference Engine... -- Subject: Unit pmie.service has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmie.service has begun shutting down. Jun 24 12:59:20 managed-node3 systemd[1]: pmie.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit pmie.service has successfully entered the 'dead' state. Jun 24 12:59:20 managed-node3 systemd[1]: Stopped Performance Metrics Inference Engine. -- Subject: Unit pmie.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmie.service has finished shutting down. Jun 24 12:59:20 managed-node3 systemd[1]: Starting Performance Metrics Inference Engine... -- Subject: Unit pmie.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmie.service has begun starting up. Jun 24 12:59:20 managed-node3 systemd[1]: Started Performance Metrics Inference Engine. -- Subject: Unit pmie.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmie.service has finished starting up. -- -- The start-up result is done. Jun 24 12:59:20 managed-node3 systemd[1]: Starting pmie farm service... -- Subject: Unit pmie_farm.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmie_farm.service has begun starting up. Jun 24 12:59:20 managed-node3 systemd[1]: Started pmie farm service. -- Subject: Unit pmie_farm.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmie_farm.service has finished starting up. -- -- The start-up result is done. 
Jun 24 12:59:21 managed-node3 platform-python[21542]: ansible-lineinfile Invoked with path=/etc/pcp.conf regexp=^PCP_ARCHIVE_DIR= line=PCP_ARCHIVE_DIR=/var/log/pcp/pmlogger state=present backrefs=False create=False backup=False firstmatch=False unsafe_writes=False search_string=None insertafter=None insertbefore=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:21 managed-node3 platform-python[21665]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmlogger follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Jun 24 12:59:21 managed-node3 platform-python[21766]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1750784361.1011348-10060-59660380507071/source dest=/etc/sysconfig/pmlogger mode=0644 follow=False _original_basename=pmlogger.defaults.j2 checksum=67bc35973101c614e92b1990f8bebfffc39fe498 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:22 managed-node3 platform-python[21891]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmlogger_timers follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Jun 24 12:59:22 managed-node3 platform-python[21992]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1750784361.8190463-10091-74069218114231/source dest=/etc/sysconfig/pmlogger_timers mode=0644 follow=False _original_basename=pmlogger.timers.j2 checksum=df7bd3b5b6f1de3af164aab81441c7251a13a298 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:23 managed-node3 platform-python[22117]: ansible-ansible.legacy.systemd Invoked with name=pmlogger state=restarted enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jun 24 12:59:23 managed-node3 systemd[1]: Stopping pmlogger farm service... -- Subject: Unit pmlogger_farm.service has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger_farm.service has begun shutting down. Jun 24 12:59:23 managed-node3 systemd[1]: pmlogger_farm.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit pmlogger_farm.service has successfully entered the 'dead' state. Jun 24 12:59:23 managed-node3 systemd[1]: Stopped pmlogger farm service. -- Subject: Unit pmlogger_farm.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger_farm.service has finished shutting down. Jun 24 12:59:23 managed-node3 systemd[1]: Stopping Performance Metrics Archive Logger... -- Subject: Unit pmlogger.service has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger.service has begun shutting down. Jun 24 12:59:23 managed-node3 systemd[1]: pmlogger.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit pmlogger.service has successfully entered the 'dead' state. 
Jun 24 12:59:23 managed-node3 systemd[1]: Stopped Performance Metrics Archive Logger. -- Subject: Unit pmlogger.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger.service has finished shutting down. Jun 24 12:59:23 managed-node3 systemd[1]: Starting Performance Metrics Archive Logger... -- Subject: Unit pmlogger.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger.service has begun starting up. Jun 24 12:59:23 managed-node3 systemd[1]: Started Performance Metrics Archive Logger. -- Subject: Unit pmlogger.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger.service has finished starting up. -- -- The start-up result is done. Jun 24 12:59:23 managed-node3 systemd[1]: Starting pmlogger farm service... -- Subject: Unit pmlogger_farm.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger_farm.service has begun starting up. Jun 24 12:59:23 managed-node3 systemd[1]: Started pmlogger farm service. -- Subject: Unit pmlogger_farm.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger_farm.service has finished starting up. -- -- The start-up result is done. Jun 24 12:59:24 managed-node3 platform-python[22745]: ansible-ansible.legacy.systemd Invoked with name=pmlogger state=restarted daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None Jun 24 12:59:24 managed-node3 systemd[1]: Stopping pmlogger farm service... -- Subject: Unit pmlogger_farm.service has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger_farm.service has begun shutting down. Jun 24 12:59:24 managed-node3 systemd[1]: pmlogger_farm.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit pmlogger_farm.service has successfully entered the 'dead' state. Jun 24 12:59:24 managed-node3 systemd[1]: Stopped pmlogger farm service. -- Subject: Unit pmlogger_farm.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger_farm.service has finished shutting down. Jun 24 12:59:24 managed-node3 systemd[1]: Stopping Performance Metrics Archive Logger... -- Subject: Unit pmlogger.service has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger.service has begun shutting down. Jun 24 12:59:24 managed-node3 systemd[1]: pmlogger.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit pmlogger.service has successfully entered the 'dead' state. Jun 24 12:59:24 managed-node3 systemd[1]: Stopped Performance Metrics Archive Logger. -- Subject: Unit pmlogger.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger.service has finished shutting down. Jun 24 12:59:24 managed-node3 systemd[1]: Starting Performance Metrics Archive Logger... -- Subject: Unit pmlogger.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger.service has begun starting up. Jun 24 12:59:25 managed-node3 systemd[1]: Started Performance Metrics Archive Logger. 
-- Subject: Unit pmlogger.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger.service has finished starting up. -- -- The start-up result is done. Jun 24 12:59:25 managed-node3 systemd[1]: Starting pmlogger farm service... -- Subject: Unit pmlogger_farm.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger_farm.service has begun starting up. Jun 24 12:59:25 managed-node3 systemd[1]: Started pmlogger farm service. -- Subject: Unit pmlogger_farm.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit pmlogger_farm.service has finished starting up. -- -- The start-up result is done. Jun 24 12:59:26 managed-node3 platform-python[23357]: ansible-service_facts Invoked Jun 24 12:59:30 managed-node3 platform-python[23620]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Jun 24 12:59:31 managed-node3 platform-python[23772]: ansible-service_facts Invoked Jun 24 12:59:33 managed-node3 platform-python[23984]: ansible-ansible.legacy.command Invoked with _raw_params=systemctl is-system-running _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 24 12:59:34 managed-node3 platform-python[24108]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jun 24 12:59:34 managed-node3 platform-python[24231]: ansible-ansible.legacy.command Invoked with _raw_params=systemctl is-system-running _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 24 12:59:35 managed-node3 platform-python[24355]: ansible-ansible.legacy.dnf Invoked with name=['pcp', 'pcp-zeroconf'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jun 24 12:59:38 managed-node3 platform-python[24479]: ansible-ansible.legacy.dnf Invoked with name=['cyrus-sasl-lib', 'cyrus-sasl-scram'] state=present allow_downgrade=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 allowerasing=False nobest=False use_backend=auto conf_file=None disable_excludes=None download_dir=None list=None releasever=None Jun 24 12:59:41 managed-node3 platform-python[24603]: ansible-ansible.legacy.command Invoked with _raw_params=cat /etc/pcp/pmcd/pmcd.conf _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 24 12:59:42 managed-node3 platform-python[24727]: 
ansible-file Invoked with path=/etc/pcp/labels state=directory mode=0755 owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:43 managed-node3 platform-python[24850]: ansible-file Invoked with path=/etc/pcp/labels/optional state=directory mode=0755 owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:43 managed-node3 platform-python[24973]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/labels/ansible-managed follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Jun 24 12:59:43 managed-node3 platform-python[25037]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/pcp/labels/ansible-managed _original_basename=pmcd.explicit.labels.j2 recurse=False state=file path=/etc/pcp/labels/ansible-managed force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:44 managed-node3 platform-python[25160]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/labels/optional/ansible-managed follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Jun 24 12:59:44 managed-node3 platform-python[25224]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/pcp/labels/optional/ansible-managed _original_basename=pmcd.implicit.labels.j2 recurse=False state=file path=/etc/pcp/labels/optional/ansible-managed force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:44 managed-node3 platform-python[25347]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmcd follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Jun 24 12:59:45 managed-node3 platform-python[25411]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/sysconfig/pmcd _original_basename=pmcd.defaults.j2 recurse=False state=file path=/etc/sysconfig/pmcd force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:45 managed-node3 platform-python[25534]: ansible-user Invoked with name=metrics system=True state=present non_unique=False force=False remove=False create_home=True move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node3 update_password=always uid=None group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None 
generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None Jun 24 12:59:46 managed-node3 platform-python[25661]: ansible-ansible.legacy.command Invoked with _raw_params=set -eu if set -o | grep -q pipefail; then set -o pipefail # pipefail not supported on debian, some ubuntu fi if ! sasldblistusers2 -f "/etc/pcp/passwd.db" | grep -q "^metrics@"; then echo "Creating new metrics user in /etc/pcp/passwd.db" echo "metrics" | saslpasswd2 -a pmcd "metrics" chown root:pcp "/etc/pcp/passwd.db" chmod 640 "/etc/pcp/passwd.db" fi _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jun 24 12:59:46 managed-node3 platform-python[25789]: ansible-ansible.legacy.stat Invoked with path=/etc/sasl2/pmcd.conf follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Jun 24 12:59:46 managed-node3 platform-python[25853]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/sasl2/pmcd.conf _original_basename=pmcd.sasl2.conf.j2 recurse=False state=file path=/etc/sasl2/pmcd.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:47 managed-node3 platform-python[25976]: ansible-ansible.legacy.systemd Invoked with name=pmcd state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jun 24 12:59:48 managed-node3 platform-python[26103]: ansible-file Invoked with path=/etc/pcp/pmieconf/network state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:48 managed-node3 platform-python[26226]: ansible-file Invoked with path=/etc/pcp/pmieconf/power state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:48 managed-node3 platform-python[26349]: ansible-file Invoked with path=/etc/pcp/pmieconf/zeroconf state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:49 managed-node3 platform-python[26472]: ansible-file Invoked with path=/etc/pcp/pmieconf/filesys state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:49 managed-node3 platform-python[26595]: 
ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/network state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:49 managed-node3 platform-python[26718]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/power state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:50 managed-node3 platform-python[26841]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/zeroconf state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:50 managed-node3 platform-python[26964]: ansible-file Invoked with path=/var/lib/pcp/config/pmieconf/filesys state=directory owner=root group=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:50 managed-node3 platform-python[27087]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/network/tcplistenoverflows follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Jun 24 12:59:51 managed-node3 platform-python[27151]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/network/tcplistenoverflows _original_basename=tcplistenoverflows recurse=False state=file path=/etc/pcp/pmieconf/network/tcplistenoverflows force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:51 managed-node3 platform-python[27274]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/network/tcpqfulldocookies follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Jun 24 12:59:51 managed-node3 platform-python[27338]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/network/tcpqfulldocookies _original_basename=tcpqfulldocookies recurse=False state=file path=/etc/pcp/pmieconf/network/tcpqfulldocookies force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:52 managed-node3 platform-python[27461]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/network/tcpqfulldrops follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Jun 24 12:59:52 
managed-node3 platform-python[27525]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/network/tcpqfulldrops _original_basename=tcpqfulldrops recurse=False state=file path=/etc/pcp/pmieconf/network/tcpqfulldrops force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:52 managed-node3 platform-python[27648]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/power/thermal_throttle follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Jun 24 12:59:52 managed-node3 platform-python[27712]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/power/thermal_throttle _original_basename=thermal_throttle recurse=False state=file path=/etc/pcp/pmieconf/power/thermal_throttle force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:53 managed-node3 platform-python[27835]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/zeroconf/all_threads follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Jun 24 12:59:53 managed-node3 platform-python[27899]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/zeroconf/all_threads _original_basename=all_threads recurse=False state=file path=/etc/pcp/pmieconf/zeroconf/all_threads force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:53 managed-node3 platform-python[28022]: ansible-ansible.legacy.stat Invoked with path=/etc/pcp/pmieconf/filesys/vfs_files follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Jun 24 12:59:54 managed-node3 platform-python[28086]: ansible-ansible.legacy.file Invoked with owner=root group=root mode=0644 dest=/etc/pcp/pmieconf/filesys/vfs_files _original_basename=vfs_files recurse=False state=file path=/etc/pcp/pmieconf/filesys/vfs_files force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:54 managed-node3 platform-python[28209]: ansible-lineinfile Invoked with state=absent path=/var/lib/pcp/config/pmie/config.default regexp=//.*global webhook_endpoint = "" backrefs=False create=False backup=False firstmatch=False unsafe_writes=False search_string=None line=None insertafter=None insertbefore=None validate=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:55 managed-node3 platform-python[28332]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcplistenoverflows dest=/var/lib/pcp/config/pmieconf/network/tcplistenoverflows state=link force=True path=/var/lib/pcp/config/pmieconf/network/tcplistenoverflows recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S 
unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:55 managed-node3 platform-python[28455]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcpqfulldocookies dest=/var/lib/pcp/config/pmieconf/network/tcpqfulldocookies state=link force=True path=/var/lib/pcp/config/pmieconf/network/tcpqfulldocookies recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:55 managed-node3 platform-python[28578]: ansible-file Invoked with src=/etc/pcp/pmieconf/network/tcpqfulldrops dest=/var/lib/pcp/config/pmieconf/network/tcpqfulldrops state=link force=True path=/var/lib/pcp/config/pmieconf/network/tcpqfulldrops recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:56 managed-node3 platform-python[28701]: ansible-file Invoked with src=/etc/pcp/pmieconf/power/thermal_throttle dest=/var/lib/pcp/config/pmieconf/power/thermal_throttle state=link force=True path=/var/lib/pcp/config/pmieconf/power/thermal_throttle recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:56 managed-node3 platform-python[28824]: ansible-file Invoked with src=/etc/pcp/pmieconf/zeroconf/all_threads dest=/var/lib/pcp/config/pmieconf/zeroconf/all_threads state=link force=True path=/var/lib/pcp/config/pmieconf/zeroconf/all_threads recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:56 managed-node3 platform-python[28947]: ansible-file Invoked with src=/etc/pcp/pmieconf/filesys/vfs_files dest=/var/lib/pcp/config/pmieconf/filesys/vfs_files state=link force=True path=/var/lib/pcp/config/pmieconf/filesys/vfs_files recurse=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:57 managed-node3 platform-python[29070]: ansible-ansible.legacy.systemd Invoked with name=pmie state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jun 24 12:59:58 managed-node3 platform-python[29197]: ansible-lineinfile Invoked with path=/etc/pcp.conf regexp=^PCP_ARCHIVE_DIR= line=PCP_ARCHIVE_DIR=/var/log/pcp/pmlogger state=present backrefs=False create=False backup=False firstmatch=False unsafe_writes=False search_string=None insertafter=None insertbefore=None validate=None mode=None owner=None group=None 
seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:58 managed-node3 platform-python[29320]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmlogger follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Jun 24 12:59:58 managed-node3 platform-python[29384]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/sysconfig/pmlogger _original_basename=pmlogger.defaults.j2 recurse=False state=file path=/etc/sysconfig/pmlogger force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:59 managed-node3 platform-python[29507]: ansible-ansible.legacy.stat Invoked with path=/etc/sysconfig/pmlogger_timers follow=False get_checksum=True checksum_algorithm=sha1 get_mime=True get_attributes=True Jun 24 12:59:59 managed-node3 platform-python[29571]: ansible-ansible.legacy.file Invoked with mode=0644 dest=/etc/sysconfig/pmlogger_timers _original_basename=pmlogger.timers.j2 recurse=False state=file path=/etc/sysconfig/pmlogger_timers force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jun 24 12:59:59 managed-node3 platform-python[29694]: ansible-ansible.legacy.systemd Invoked with name=pmlogger state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jun 24 13:00:01 managed-node3 platform-python[29821]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex echo '##################' echo List of SELinux AVCs - note list may be empty grep type=AVC /var/log/audit/audit.log echo '##################' ls -alrtF /run if [ -d /run/pcp ]; then ls -alrtF /run/pcp else echo ERROR - /run/pcp does not exist fi _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None ################## List of SELinux AVCs - note list may be empty ################## total 40 dr-xr-xr-x. 17 root root 224 May 29 2024 ../ drwxr-xr-x. 2 root root 60 Jun 24 12:52 tmpfiles.d/ drwxr-xr-x. 3 root root 60 Jun 24 12:52 log/ drwxr-xr-x. 2 root root 40 Jun 24 12:52 mount/ drwxr-xr-x. 4 root root 100 Jun 24 12:52 initramfs/ -r--r--r--. 1 root root 33 Jun 24 12:52 machine-id srw-rw-rw-. 1 root root 0 Jun 24 12:52 rpcbind.sock= prw-------. 1 root root 0 Jun 24 12:52 initctl| drwx------. 2 root root 40 Jun 24 12:52 cryptsetup/ drwxr-xr-x. 2 root root 40 Jun 24 12:52 setrans/ drwxr-xr-x. 2 root root 40 Jun 24 12:52 sepermit/ drwxr-xr-x. 2 root root 40 Jun 24 12:52 motd.d/ drwxr-xr-x. 2 root root 40 Jun 24 12:52 faillock/ drwxr-xr-x. 2 root root 40 Jun 24 12:52 console/ drwx--x--x. 3 root root 60 Jun 24 12:52 sudo/ drwx------. 2 rpc rpc 60 Jun 24 12:52 rpcbind/ -rw-r--r--. 1 root root 4 Jun 24 12:52 auditd.pid srw-rw-rw-. 1 root root 0 Jun 24 12:52 .heim_org.h5l.kcm-socket= drwxr-xr-x. 2 root root 60 Jun 24 12:52 dbus/ drwxr-xr-x. 2 root root 60 Jun 24 12:52 irqbalance/ -rw-r--r--. 1 root root 4 Jun 24 12:52 dhclient.pid -rw-r--r--. 1 root root 698 Jun 24 12:52 dhclient.lease -rw-------. 1 root root 4 Jun 24 12:52 gssproxy.pid srw-rw-rw-. 
1 root root 0 Jun 24 12:52 gssproxy.sock= drwxr-xr-x. 2 root root 100 Jun 24 12:52 chrony-helper/ drwxr-x---. 2 chrony chrony 80 Jun 24 12:52 chrony/ drwxr-xr-x. 2 root root 60 Jun 24 12:52 tuned/ -rw-r--r--. 1 root root 5 Jun 24 12:53 sshd.pid -rw-------. 1 root root 5 Jun 24 12:53 sm-notify.pid drwxr-xr-x. 3 root root 80 Jun 24 12:53 lock/ -rw-------. 1 root root 4 Jun 24 12:53 rsyslogd.pid drwxr-xr-x. 2 root root 40 Jun 24 12:53 plymouth/ -rw-r--r--. 1 root root 5 Jun 24 12:53 crond.pid ----------. 1 root root 0 Jun 24 12:53 cron.reboot drwx------. 3 root root 340 Jun 24 12:53 cloud-init/ drwxr-xr-x. 2 root root 80 Jun 24 12:53 blkid/ -rw-------. 1 root root 0 Jun 24 12:53 agetty.reload drwxr-xr-x. 3 root root 60 Jun 24 12:54 user/ drwxr-xr-x. 6 root root 160 Jun 24 12:55 NetworkManager/ drwxr-xr-x. 28 root root 880 Jun 24 12:58 ./ drwxr-xr-x. 7 root root 160 Jun 24 12:58 udev/ drwxr-xr-x. 18 root root 460 Jun 24 12:59 systemd/ drwxrwxr-x. 2 pcp pcp 160 Jun 24 12:59 pcp/ -rw-rw-r--. 1 root utmp 1536 Jun 24 13:00 utmp total 12 drwxr-xr-x. 28 root root 880 Jun 24 12:58 ../ srw-rw-rw-. 1 root root 0 Jun 24 12:59 pmcd.socket= -r--r--r--. 1 root root 5 Jun 24 12:59 pmcd.pid -r--r--r--. 1 pcp pcp 5 Jun 24 12:59 pmie.pid -r--r--r--. 1 pcp pcp 5 Jun 24 12:59 pmlogger.pid srw-rw-rw-. 1 pcp pcp 0 Jun 24 12:59 pmlogger.23035.socket= lrwxrwxrwx. 1 pcp pcp 30 Jun 24 12:59 pmlogger.primary.socket -> /run/pcp/pmlogger.23035.socket= drwxrwxr-x. 2 pcp pcp 160 Jun 24 12:59 ./ TASK [Reraise error] *********************************************************** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/tests/metrics/handle_test_failure.yml:17 Tuesday 24 June 2025 13:00:01 -0400 (0:00:00.428) 0:00:31.153 ********** fatal: [managed-node3]: FAILED! => {} MSG: The task includes an option with an undefined variable. The error was: 'ansible_failed_result' is undefined. 'ansible_failed_result' is undefined The error appears to be in '/tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/tests/metrics/handle_test_failure.yml': line 17, column 3, but may be elsewhere in the file depending on the exact syntax problem. 
The offending line appears to be: - name: Reraise error ^ here TASK [Get final state of services] ********************************************* task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/tests/metrics/restore_services_state.yml:3 Tuesday 24 June 2025 13:00:01 -0400 (0:00:00.020) 0:00:31.173 ********** ok: [managed-node3] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "auto-cpufreq.service": { "name": "auto-cpufreq.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "avahi-daemon.service": { "name": "avahi-daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "chrony-dnssrv@.service": { "name": "chrony-dnssrv@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "static" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, 
"dbus-org.freedesktop.portable1.service": { "name": "dbus-org.freedesktop.portable1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "running", "status": "static" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "halt-local.service": { "name": "halt-local.service", "source": "systemd", "state": "inactive", "status": "static" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "import-state.service": { "name": "import-state.service", "source": "systemd", "state": "stopped", "status": 
"enabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "iprdump.service": { "name": "iprdump.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprinit.service": { "name": "iprinit.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprupdate.service": { "name": "iprupdate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "loadmodules.service": { "name": "loadmodules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "messagebus.service": { "name": "messagebus.service", "source": "systemd", "state": "active", "status": "static" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-convert.service": { "name": "nfs-convert.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, 
"ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-halt.service": { "name": "plymouth-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-kexec.service": { "name": "plymouth-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-poweroff.service": { "name": "plymouth-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-quit.service": { "name": "plymouth-quit.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-read-write.service": { "name": "plymouth-read-write.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-reboot.service": { "name": "plymouth-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-switch-root-initramfs.service": { "name": "plymouth-switch-root-initramfs.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-switch-root.service": { "name": "plymouth-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmcd.service": { "name": "pmcd.service", "source": "systemd", "state": "running", "status": "enabled" }, "pmfind.service": { "name": "pmfind.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "pmie.service": { "name": "pmie.service", "source": "systemd", "state": "running", "status": "enabled" }, "pmie_check.service": { "name": "pmie_check.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmie_daily.service": { "name": "pmie_daily.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmie_farm.service": { "name": "pmie_farm.service", "source": "systemd", "state": "running", "status": "disabled" }, "pmie_farm_check.service": { "name": "pmie_farm_check.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmlogger.service": { "name": "pmlogger.service", "source": "systemd", "state": "running", "status": "enabled" }, "pmlogger_check.service": { "name": "pmlogger_check.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmlogger_daily.service": { "name": "pmlogger_daily.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmlogger_daily_report.service": { "name": "pmlogger_daily_report.service", "source": "systemd", "state": "inactive", "status": "static" }, "pmlogger_farm.service": { "name": "pmlogger_farm.service", "source": "systemd", "state": "running", "status": "disabled" }, "pmlogger_farm_check.service": { "name": "pmlogger_farm_check.service", "source": "systemd", "state": "stopped", "status": "static" }, "pmproxy.service": { "name": "pmproxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "power-profiles-daemon.service": { "name": "power-profiles-daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", 
"source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "snapd.seeded.service": { "name": "snapd.seeded.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": 
"sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "active", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-plymouth.service": { "name": "systemd-ask-password-plymouth.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, 
"systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-portabled.service": { "name": "systemd-portabled.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-resolved.service": { "name": "systemd-resolved.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "masked" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "tcsd.service": { "name": "tcsd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "timedatex.service": { "name": "timedatex.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "tlp.service": { "name": "tlp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "tuned.service": { "name": "tuned.service", "source": "systemd", "state": "running", "status": "enabled" }, "unbound-anchor.service": { "name": "unbound-anchor.service", "source": "systemd", "state": "stopped", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "yppasswdd.service": { "name": "yppasswdd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypserv.service": { "name": "ypserv.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ypxfrd.service": { "name": "ypxfrd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "zabbix-agent.service": { "name": "zabbix-agent.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [Restore state of services] *********************************************** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/tests/metrics/restore_services_state.yml:9 Tuesday 24 June 2025 13:00:02 -0400 (0:00:01.761) 0:00:32.935 ********** ok: [managed-node3] => (item=pmcd) => { "ansible_loop_var": "item", "changed": false, "item": "pmcd", "name": "pmcd", "state": "started", "status": { "ActiveEnterTimestamp": "Tue 2025-06-24 12:59:09 EDT", "ActiveEnterTimestampMonotonic": "389230875", "ActiveExitTimestamp": "Tue 2025-06-24 12:59:08 EDT", "ActiveExitTimestampMonotonic": "388150858", "ActiveState": "active", "After": "basic.target system.slice avahi-daemon.service systemd-journald.socket sysinit.target network-online.target", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "yes", "AssertTimestamp": "Tue 2025-06-24 12:59:08 EDT", "AssertTimestampMonotonic": "388917276", "Before": "zabbix-agent.service pmlogger.service multi-user.target pmie.service shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", 
"CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Tue 2025-06-24 12:59:08 EDT", "ConditionTimestampMonotonic": "388917274", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/system.slice/pmcd.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Performance Metrics Collector Daemon", "DevicePolicy": "auto", "Documentation": "man:pmcd(1)", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "17163", "ExecMainStartTimestamp": "Tue 2025-06-24 12:59:09 EDT", "ExecMainStartTimestampMonotonic": "389230851", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/libexec/pcp/lib/pmcd ; argv[]=/usr/libexec/pcp/lib/pmcd start-systemd ; ignore_errors=no ; start_time=[Tue 2025-06-24 12:59:08 EDT] ; stop_time=[n/a] ; pid=17067 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/libexec/pcp/lib/pmcd ; argv[]=/usr/libexec/pcp/lib/pmcd stop-systemd ; ignore_errors=no ; start_time=[Tue 2025-06-24 12:59:08 EDT] ; stop_time=[Tue 2025-06-24 12:59:08 EDT] ; pid=16702 ; code=exited ; status=0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/pmcd.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "pmcd.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestamp": "Tue 2025-06-24 12:59:08 EDT", "InactiveEnterTimestampMonotonic": "388916458", "InactiveExitTimestamp": "Tue 2025-06-24 12:59:08 EDT", "InactiveExitTimestampMonotonic": "388918121", "InvocationID": "a8f7e69db5ca42d7815ce0ec09a1c3e5", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": 
"819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14004", "LimitNPROCSoft": "14004", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14004", "LimitSIGPENDINGSoft": "14004", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "17163", "MemoryAccounting": "yes", "MemoryCurrent": "37498880", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "pmcd.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PIDFile": "/run/pcp/pmcd.pid", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice sysinit.target", "Restart": "always", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Tue 2025-06-24 12:59:09 EDT", "StateChangeTimestampMonotonic": "389230875", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "9", "TasksMax": "22406", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target pmie.service pmlogger.service", "WatchdogTimestamp": "Tue 2025-06-24 12:59:09 EDT", "WatchdogTimestampMonotonic": "389230871", "WatchdogUSec": "0" } } ok: [managed-node3] => (item=pmlogger) => { "ansible_loop_var": "item", "changed": false, "item": "pmlogger", "name": "pmlogger", "state": "started", "status": { "ActiveEnterTimestamp": "Tue 2025-06-24 12:59:25 EDT", "ActiveEnterTimestampMonotonic": "405552152", "ActiveExitTimestamp": "Tue 2025-06-24 12:59:24 EDT", 
"ActiveExitTimestampMonotonic": "404895614", "ActiveState": "active", "After": "basic.target pmcd.service system.slice network-online.target sysinit.target systemd-journald.socket", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "yes", "AssertTimestamp": "Tue 2025-06-24 12:59:24 EDT", "AssertTimestampMonotonic": "404968961", "Before": "pmlogger_check.timer pmlogger_daily.timer shutdown.target multi-user.target pmlogger_farm.service", "BindsTo": "pmlogger_check.timer pmlogger_daily.timer pmlogger_farm.service", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Tue 2025-06-24 12:59:24 EDT", "ConditionTimestampMonotonic": "404968960", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ConsistsOf": "pmlogger_farm.service", "ControlGroup": "/system.slice/pmlogger.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Performance Metrics Archive Logger", "DevicePolicy": "auto", "Documentation": "man:pmlogger(1)", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "Environment": "PMLOGGER_CHECK_PARAMS=--only-primary", "EnvironmentFiles": "/etc/sysconfig/pmlogger (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "23035", "ExecMainStartTimestamp": "Tue 2025-06-24 12:59:25 EDT", "ExecMainStartTimestampMonotonic": "405552125", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/libexec/pcp/lib/pmlogger ; argv[]=/usr/libexec/pcp/lib/pmlogger start-systemd ; ignore_errors=no ; start_time=[Tue 2025-06-24 12:59:24 EDT] ; stop_time=[n/a] ; pid=22811 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/libexec/pcp/lib/pmlogger ; argv[]=/usr/libexec/pcp/lib/pmlogger stop-systemd ; ignore_errors=no ; start_time=[Tue 2025-06-24 12:59:24 EDT] ; stop_time=[Tue 2025-06-24 12:59:24 EDT] ; pid=22753 ; code=exited ; status=0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/pmlogger.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": 
"18446744073709551615", "Id": "pmlogger.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestamp": "Tue 2025-06-24 12:59:24 EDT", "InactiveEnterTimestampMonotonic": "404968092", "InactiveExitTimestamp": "Tue 2025-06-24 12:59:24 EDT", "InactiveExitTimestampMonotonic": "404970097", "InvocationID": "05ac186e5255420a83037767a5665f85", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14004", "LimitNPROCSoft": "14004", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14004", "LimitSIGPENDINGSoft": "14004", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "23035", "MemoryAccounting": "yes", "MemoryCurrent": "2965504", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "pmlogger.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PIDFile": "/run/pcp/pmlogger.pid", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice sysinit.target", "Restart": "always", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Tue 2025-06-24 12:59:25 EDT", "StateChangeTimestampMonotonic": "405552152", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", 
"SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "1", "TasksMax": "22406", "TimeoutStartUSec": "2min", "TimeoutStopUSec": "2min", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "pmcd.service", "WatchdogTimestamp": "Tue 2025-06-24 12:59:25 EDT", "WatchdogTimestampMonotonic": "405552149", "WatchdogUSec": "0" } } ok: [managed-node3] => (item=pmie) => { "ansible_loop_var": "item", "changed": false, "item": "pmie", "name": "pmie", "state": "started", "status": { "ActiveEnterTimestamp": "Tue 2025-06-24 12:59:20 EDT", "ActiveEnterTimestampMonotonic": "400548123", "ActiveExitTimestamp": "Tue 2025-06-24 12:59:20 EDT", "ActiveExitTimestampMonotonic": "400082447", "ActiveState": "active", "After": "systemd-journald.socket network-online.target basic.target system.slice sysinit.target pmcd.service", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "yes", "AssertTimestamp": "Tue 2025-06-24 12:59:20 EDT", "AssertTimestampMonotonic": "400318242", "Before": "multi-user.target pmie_farm.service shutdown.target pmie_daily.timer pmie_check.timer", "BindsTo": "pmie_farm.service pmie_daily.timer pmie_check.timer", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Tue 2025-06-24 12:59:20 EDT", "ConditionTimestampMonotonic": "400318240", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ConsistsOf": "pmie_farm.service", "ControlGroup": "/system.slice/pmie.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Performance Metrics Inference Engine", "DevicePolicy": "auto", "Documentation": "man:pmie(1)", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "Environment": "PMIE_CHECK_PARAMS=--only-primary", "EnvironmentFiles": "/etc/sysconfig/pmie (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "21281", "ExecMainStartTimestamp": "Tue 2025-06-24 12:59:20 EDT", "ExecMainStartTimestampMonotonic": "400548101", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/libexec/pcp/lib/pmie ; 
argv[]=/usr/libexec/pcp/lib/pmie start-systemd ; ignore_errors=no ; start_time=[Tue 2025-06-24 12:59:20 EDT] ; stop_time=[n/a] ; pid=21064 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/libexec/pcp/lib/pmie ; argv[]=/usr/libexec/pcp/lib/pmie stop-systemd ; ignore_errors=no ; start_time=[Tue 2025-06-24 12:59:20 EDT] ; stop_time=[Tue 2025-06-24 12:59:20 EDT] ; pid=20852 ; code=exited ; status=0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/pmie.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "pmie.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestamp": "Tue 2025-06-24 12:59:20 EDT", "InactiveEnterTimestampMonotonic": "400317248", "InactiveExitTimestamp": "Tue 2025-06-24 12:59:20 EDT", "InactiveExitTimestampMonotonic": "400319537", "InvocationID": "55b7c6a0971a48c98a1491eb4e8d7872", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14004", "LimitNPROCSoft": "14004", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14004", "LimitSIGPENDINGSoft": "14004", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "21281", "MemoryAccounting": "yes", "MemoryCurrent": "1605632", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "pmie.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PIDFile": "/run/pcp/pmie.pid", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice sysinit.target", "Restart": "always", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": 
"success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Tue 2025-06-24 12:59:20 EDT", "StateChangeTimestampMonotonic": "400548123", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "1", "TasksMax": "22406", "TimeoutStartUSec": "2min", "TimeoutStopUSec": "2min", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "pmcd.service", "WatchdogTimestamp": "Tue 2025-06-24 12:59:20 EDT", "WatchdogTimestampMonotonic": "400548120", "WatchdogUSec": "0" } } ok: [managed-node3] => (item=pmproxy) => { "ansible_loop_var": "item", "changed": false, "item": "pmproxy", "name": "pmproxy", "state": "stopped", "status": { "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "avahi-daemon.service pmcd.service basic.target network-online.target redis.service system.slice systemd-journald.socket sysinit.target", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "Proxy for Performance Metrics Collector Daemon", "DevicePolicy": "auto", "Documentation": "man:pmproxy(1)", 
"DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/libexec/pcp/lib/pmproxy ; argv[]=/usr/libexec/pcp/lib/pmproxy start-systemd ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/usr/lib/systemd/system/pmproxy.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "pmproxy.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14004", "LimitNPROCSoft": "14004", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14004", "LimitSIGPENDINGSoft": "14004", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "MemoryAccounting": "yes", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "pmproxy.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice sysinit.target", "Restart": "always", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", 
"SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22406", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "disabled", "UtmpMode": "init", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } skipping: [managed-node3] => (item=redis) => { "ansible_loop_var": "item", "changed": false, "false_condition": "item + '.service' in final_state.ansible_facts.services", "item": "redis", "skip_reason": "Conditional result was False" } skipping: [managed-node3] => (item=valkey) => { "ansible_loop_var": "item", "changed": false, "false_condition": "item + '.service' in final_state.ansible_facts.services", "item": "valkey", "skip_reason": "Conditional result was False" } skipping: [managed-node3] => (item=grafana-server) => { "ansible_loop_var": "item", "changed": false, "false_condition": "item + '.service' in final_state.ansible_facts.services", "item": "grafana-server", "skip_reason": "Conditional result was False" } TASK [Stop firewall] *********************************************************** task path: /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/tests/metrics/restore_services_state.yml:30 Tuesday 24 June 2025 13:00:05 -0400 (0:00:02.093) 0:00:35.028 ********** skipping: [managed-node3] => { "changed": false, "false_condition": "metrics_manage_firewall | bool", "skip_reason": "Conditional result was False" } PLAY RECAP ********************************************************************* managed-node3 : ok=48 changed=0 unreachable=0 failed=1 skipped=35 rescued=0 ignored=0 SYSTEM ROLES ERRORS BEGIN v1 [ { "ansible_version": "2.16.14", "end_time": "2025-06-24T17:00:01.166628+00:00Z", "host": "managed-node3", "message": "The task includes an option with an undefined variable. The error was: 'ansible_failed_result' is undefined. 
'ansible_failed_result' is undefined\n\nThe error appears to be in '/tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/tests/metrics/handle_test_failure.yml': line 17, column 3, but may\nbe elsewhere in the file depending on the exact syntax problem.\n\nThe offending line appears to be:\n\n\n- name: Reraise error\n ^ here\n", "start_time": "2025-06-24T17:00:01.155335+00:00Z", "task_name": "Reraise error", "task_path": "/tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/tests/metrics/handle_test_failure.yml:17" } ] SYSTEM ROLES ERRORS END v1 TASKS RECAP ******************************************************************** Tuesday 24 June 2025 13:00:05 -0400 (0:00:00.036) 0:00:35.065 ********** =============================================================================== fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure extra performance rules are installed for targeted hosts --- 3.64s /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:24 fedora.linux_system_roles.private_metrics_subrole_pcp : Install Performance Co-Pilot packages --- 2.99s /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/main.yml:47 fedora.linux_system_roles.private_metrics_subrole_pcp : Install authentication packages --- 2.91s /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/main.yml:53 fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure extra rules symlinks have been created for targeted hosts --- 2.24s /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:86 Restore state of services ----------------------------------------------- 2.09s /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/tests/metrics/restore_services_state.yml:9 Get initial state of services ------------------------------------------- 1.82s /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/tests/metrics/get_services_state.yml:3 Get final state of services --------------------------------------------- 1.76s /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/tests/metrics/restore_services_state.yml:3 fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure extra performance rule group link directories exist --- 1.42s /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:14 fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure extra performance rule group directories exist --- 1.39s /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:4 fedora.linux_system_roles.private_metrics_subrole_pcp : Extract metric collection configuration file content --- 1.35s /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:9 Gathering Facts --------------------------------------------------------- 1.12s /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/tests/metrics/tests_verify_basic.yml:9 fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric collector is running and enabled on boot --- 0.76s /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:110 fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance 
metric logging is configured --- 0.67s /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmlogger.yml:12 fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure any implicit metric labels are configured --- 0.66s /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:46 fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure any explicit metric labels are configured --- 0.66s /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:39 fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric collector authentication is configured --- 0.65s /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:86 fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric logging retention period is set --- 0.65s /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmlogger.yml:19 fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric collector is configured --- 0.63s /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:53 fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric collector system accounts are configured --- 0.54s /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmcd.yml:60 fedora.linux_system_roles.private_metrics_subrole_pcp : Ensure performance metric inference is running and enabled on boot --- 0.53s /tmp/collections-wk5/ansible_collections/fedora/linux_system_roles/roles/private_metrics_subrole_pcp/tasks/pmie.yml:120
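Note on the output above: the "Restore state of services" loop iterates over pmcd, pmlogger, pmie, pmproxy, redis, valkey and grafana-server, and the recorded condition item + '.service' in final_state.ansible_facts.services explains why the redis, valkey and grafana-server items were skipped: those units were not present in the gathered service facts. The following is a minimal sketch of a task shaped like that output, inferred from the log rather than taken from restore_services_state.yml; the initial_state register name is an assumption (only final_state appears in the recorded condition), and error handling for units missing from the initial facts is omitted.

    # Sketch only, inferred from the log output above; the real tasks in
    # restore_services_state.yml may differ. "initial_state" is an assumed
    # register name; "final_state" appears in the recorded skip condition.
    - name: Get final state of services
      ansible.builtin.service_facts:
      register: final_state

    - name: Restore state of services
      ansible.builtin.service:
        name: "{{ item }}"
        # Start the unit again only if it was running before the test.
        state: "{{ 'started' if initial_state.ansible_facts.services[item + '.service'].state == 'running' else 'stopped' }}"
      when: item + '.service' in final_state.ansible_facts.services
      loop:
        - pmcd
        - pmlogger
        - pmie
        - pmproxy
        - redis
        - valkey
        - grafana-server

The single failure in the PLAY RECAP corresponds to the SYSTEM ROLES ERRORS entry for the "Reraise error" task at handle_test_failure.yml:17, which references ansible_failed_result. Ansible normally defines that variable only inside the rescue section of a block, so the "is undefined" error suggests the variable was not populated at the point where the reraise task ran in this run.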