ansible-playbook 2.9.27
  config file = None
  configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /usr/local/lib/python3.9/site-packages/ansible
  executable location = /usr/local/bin/ansible-playbook
  python version = 3.9.19 (main, May 16 2024, 11:40:09) [GCC 8.5.0 20210514 (Red Hat 8.5.0-22)]
No config file found; using defaults
[WARNING]: running playbook inside collection fedora.linux_system_roles
Skipping callback 'actionable', as we already have a stdout callback.
Skipping callback 'counter_enabled', as we already have a stdout callback.
Skipping callback 'debug', as we already have a stdout callback.
Skipping callback 'dense', as we already have a stdout callback.
Skipping callback 'dense', as we already have a stdout callback.
Skipping callback 'full_skip', as we already have a stdout callback.
Skipping callback 'json', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'null', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.
Skipping callback 'selective', as we already have a stdout callback.
Skipping callback 'skippy', as we already have a stdout callback.
Skipping callback 'stderr', as we already have a stdout callback.
Skipping callback 'unixy', as we already have a stdout callback.
Skipping callback 'yaml', as we already have a stdout callback.

PLAYBOOK: tests_quadlet_basic.yml **********************************************
2 plays in /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml

PLAY [all] *********************************************************************
META: ran handlers

TASK [Include vault variables] *************************************************
task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:5
Wednesday 02 April 2025 12:17:45 -0400 (0:00:00.022) 0:00:00.022 *******
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_test_password": {
            "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n35383939616163653333633431363463313831383037386236646138333162396161356130303461\n3932623930643263313563336163316337643562333936360a363538636631313039343233383732\n38666530383538656639363465313230343533386130303833336434303438333161656262346562\n3362626538613031640a663330613638366132356534363534353239616666653466353961323533\n6565\n"
        },
        "mysql_container_root_password": {
            "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n61333932373230333539663035366431326163363166363036323963623131363530326231303634\n6635326161643165363366323062333334363730376631660a393566366139353861656364656661\n38653463363837336639363032646433666361646535366137303464623261313663643336306465\n6264663730656337310a343962353137386238383064646533366433333437303566656433386233\n34343235326665646661623131643335313236313131353661386338343366316261643634653633\n3832313034366536616531323963333234326461353130303532\n"
        }
    },
    "ansible_included_var_files": [
        "/tmp/podman-keE/tests/vars/vault-variables.yml"
    ],
    "changed": false
}
META: ran handlers
META: ran handlers

PLAY [Ensure that the role can manage quadlet specs] ***************************

TASK [Gathering Facts] *********************************************************
task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:9
Wednesday 02 April 2025 12:17:45 -0400 (0:00:00.022) 0:00:00.044
******* ok: [managed-node1] META: ran handlers TASK [Test is only supported on x86_64] **************************************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:55 Wednesday 02 April 2025 12:17:46 -0400 (0:00:01.009) 0:00:01.054 ******* skipping: [managed-node1] => {} META: TASK [Run role - do not pull images] ******************************************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:70 Wednesday 02 April 2025 12:17:46 -0400 (0:00:00.066) 0:00:01.121 ******* TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Wednesday 02 April 2025 12:17:46 -0400 (0:00:00.083) 0:00:01.204 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Wednesday 02 April 2025 12:17:46 -0400 (0:00:00.033) 0:00:01.238 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Wednesday 02 April 2025 12:17:46 -0400 (0:00:00.021) 0:00:01.259 ******* ok: [managed-node1] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Wednesday 02 April 2025 12:17:47 -0400 (0:00:00.534) 0:00:01.793 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Wednesday 02 April 2025 12:17:47 -0400 (0:00:00.037) 0:00:01.831 ******* ok: [managed-node1] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Wednesday 02 April 2025 12:17:47 -0400 (0:00:00.358) 0:00:02.190 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_is_transactional": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Wednesday 02 April 2025 12:17:47 -0400 (0:00:00.025) 0:00:02.216 ******* ok: [managed-node1] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node1] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": 
"Conditional result was False" } ok: [managed-node1] => (item=CentOS_8.yml) => { "ansible_facts": { "__podman_packages": [ "crun", "podman", "podman-plugins", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } ok: [managed-node1] => (item=CentOS_8.yml) => { "ansible_facts": { "__podman_packages": [ "crun", "podman", "podman-plugins", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Wednesday 02 April 2025 12:17:47 -0400 (0:00:00.077) 0:00:02.293 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Wednesday 02 April 2025 12:17:49 -0400 (0:00:01.784) 0:00:04.077 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Wednesday 02 April 2025 12:17:49 -0400 (0:00:00.034) 0:00:04.112 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Wednesday 02 April 2025 12:17:49 -0400 (0:00:00.044) 0:00:04.156 ******* skipping: [managed-node1] => {} TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Wednesday 02 April 2025 12:17:49 -0400 (0:00:00.037) 0:00:04.193 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Wednesday 02 April 2025 12:17:49 -0400 (0:00:00.035) 0:00:04.229 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Wednesday 02 April 2025 12:17:49 -0400 (0:00:00.036) 0:00:04.265 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.027166", "end": "2025-04-02 12:17:50.381516", "rc": 0, "start": "2025-04-02 12:17:50.354350" } STDOUT: podman version 4.9.4-dev TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Wednesday 02 April 2025 12:17:50 -0400 (0:00:00.520) 0:00:04.786 ******* ok: [managed-node1] => { "ansible_facts": { "podman_version": "4.9.4-dev" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Wednesday 02 April 2025 12:17:50 -0400 (0:00:00.037) 0:00:04.823 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Wednesday 02 April 2025 12:17:50 -0400 (0:00:00.035) 0:00:04.859 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } META: end_host conditional evaluated to false, continuing execution for managed-node1 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Wednesday 02 April 2025 12:17:50 -0400 (0:00:00.063) 0:00:04.923 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } META: end_host conditional evaluated to false, continuing execution for managed-node1 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Wednesday 02 April 2025 12:17:50 -0400 (0:00:00.103) 0:00:05.026 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:17:50 -0400 (0:00:00.106) 0:00:05.133 ******* ok: [managed-node1] => { "ansible_facts": { "getent_passwd": { "root": [ "x", "0", "0", "root", "/root", "/bin/bash" ] } }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:17:51 -0400 (0:00:00.628) 0:00:05.761 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:17:51 -0400 (0:00:00.042) 0:00:05.803 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:17:51 -0400 (0:00:00.049) 0:00:05.853 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610609.2783134, "attr_flags": "", "attributes": [], "block_size": 4096, 
"blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610533.3987217, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986653, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "4263604762", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:17:51 -0400 (0:00:00.411) 0:00:06.264 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:17:51 -0400 (0:00:00.059) 0:00:06.324 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:17:52 -0400 (0:00:00.060) 0:00:06.384 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:17:52 -0400 (0:00:00.117) 0:00:06.501 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:17:52 -0400 (0:00:00.069) 0:00:06.571 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:17:52 -0400 (0:00:00.058) 0:00:06.629 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:17:52 -0400 (0:00:00.060) 0:00:06.690 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:17:52 -0400 (0:00:00.057) 0:00:06.748 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Wednesday 02 April 2025 12:17:52 -0400 (0:00:00.059) 0:00:06.808 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_parent_mode": "0755", "__podman_parent_path": "/etc/containers", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:126 Wednesday 02 April 2025 12:17:52 -0400 (0:00:00.115) 0:00:06.923 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Wednesday 02 April 2025 12:17:52 -0400 (0:00:00.084) 0:00:07.008 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Wednesday 02 April 2025 12:17:52 -0400 (0:00:00.041) 0:00:07.049 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:129 Wednesday 02 April 2025 12:17:52 -0400 (0:00:00.034) 0:00:07.083 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Wednesday 02 April 2025 12:17:52 -0400 (0:00:00.072) 0:00:07.156 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Wednesday 02 April 2025 12:17:52 -0400 (0:00:00.034) 0:00:07.190 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:132 Wednesday 02 April 2025 12:17:52 -0400 
(0:00:00.042) 0:00:07.233 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:7 Wednesday 02 April 2025 12:17:53 -0400 (0:00:00.115) 0:00:07.349 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:15 Wednesday 02 April 2025 12:17:53 -0400 (0:00:00.056) 0:00:07.405 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:135 Wednesday 02 April 2025 12:17:53 -0400 (0:00:00.113) 0:00:07.518 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:8 Wednesday 02 April 2025 12:17:53 -0400 (0:00:00.141) 0:00:07.659 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:16 Wednesday 02 April 2025 12:17:53 -0400 (0:00:00.056) 0:00:07.716 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:21 Wednesday 02 April 2025 12:17:53 -0400 (0:00:00.056) 0:00:07.772 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:27 Wednesday 02 April 2025 12:17:53 -0400 (0:00:00.056) 0:00:07.828 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:141 Wednesday 02 April 2025 12:17:53 -0400 (0:00:00.059) 0:00:07.888 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:148 Wednesday 02 April 2025 12:17:53 -0400 (0:00:00.055) 0:00:07.943 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] ***
task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:155
Wednesday 02 April 2025 12:17:53 -0400 (0:00:00.056) 0:00:08.000 *******
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_cancel_user_linger": []
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] *******
task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:159
Wednesday 02 April 2025 12:17:53 -0400 (0:00:00.055) 0:00:08.055 *******
skipping: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Handle credential files - present] ****
task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:168
Wednesday 02 April 2025 12:17:53 -0400 (0:00:00.051) 0:00:08.107 *******
skipping: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Handle secrets] ***********************
task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:177
Wednesday 02 April 2025 12:17:53 -0400 (0:00:00.053) 0:00:08.161 *******
skipping: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] *****
task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:184
Wednesday 02 April 2025 12:17:53 -0400 (0:00:00.051) 0:00:08.212 *******
skipping: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ********
task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:191
Wednesday 02 April 2025 12:17:53 -0400 (0:00:00.055) 0:00:08.268 *******
included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Wednesday 02 April 2025 12:17:54 -0400 (0:00:00.143) 0:00:08.411 *******
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_quadlet_file_src": "",
        "__podman_quadlet_spec": {
            "Container": {
                "ContainerName": "nopull",
                "Image": "quay.io/libpod/testimage:20210610"
            },
            "Install": {
                "WantedBy": "default.target"
            }
        },
        "__podman_quadlet_str": "",
        "__podman_quadlet_template_src": ""
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Wednesday 02 April 2025 12:17:54 -0400 (0:00:00.070) 0:00:08.481 *******
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_continue_if_pull_fails": false,
        "__podman_pull_image": false,
        "__podman_state": "created",
        "__podman_systemd_unit_scope": "",
        "__podman_user": "root"
    },
"changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Wednesday 02 April 2025 12:17:54 -0400 (0:00:00.069) 0:00:08.551 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Wednesday 02 April 2025 12:17:54 -0400 (0:00:00.122) 0:00:08.673 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_name": "nopull", "__podman_quadlet_type": "container", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Wednesday 02 April 2025 12:17:54 -0400 (0:00:00.076) 0:00:08.750 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:17:54 -0400 (0:00:00.107) 0:00:08.858 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:17:54 -0400 (0:00:00.071) 0:00:08.929 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:17:54 -0400 (0:00:00.068) 0:00:08.998 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:17:54 -0400 (0:00:00.078) 0:00:09.076 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610609.2783134, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610533.3987217, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986653, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "4263604762", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : 
Check with getsubids for user subuids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:17:55 -0400 (0:00:00.443) 0:00:09.520 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:17:55 -0400 (0:00:00.065) 0:00:09.585 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:17:55 -0400 (0:00:00.060) 0:00:09.646 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:17:55 -0400 (0:00:00.083) 0:00:09.729 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:17:55 -0400 (0:00:00.059) 0:00:09.789 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:17:55 -0400 (0:00:00.057) 0:00:09.847 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:17:55 -0400 (0:00:00.059) 0:00:09.907 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:17:55 -0400 (0:00:00.060) 0:00:09.968 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Wednesday 02 April 2025 12:17:55 -0400 (0:00:00.060) 0:00:10.028 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_activate_systemd_unit": false, "__podman_images_found": [ "quay.io/libpod/testimage:20210610" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "nopull.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": 
"/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Wednesday 02 April 2025 12:17:55 -0400 (0:00:00.101) 0:00:10.130 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Wednesday 02 April 2025 12:17:55 -0400 (0:00:00.125) 0:00:10.256 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Wednesday 02 April 2025 12:17:55 -0400 (0:00:00.048) 0:00:10.304 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_images": [ "quay.io/libpod/testimage:20210610" ], "__podman_quadlet_file": "/etc/containers/systemd/nopull.container", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Wednesday 02 April 2025 12:17:56 -0400 (0:00:00.109) 0:00:10.414 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Wednesday 02 April 2025 12:17:56 -0400 (0:00:00.052) 0:00:10.466 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Wednesday 02 April 2025 12:17:56 -0400 (0:00:00.035) 0:00:10.502 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Wednesday 02 April 2025 12:17:56 -0400 (0:00:00.080) 0:00:10.582 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Wednesday 02 April 2025 12:17:56 -0400 (0:00:00.060) 0:00:10.643 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Wednesday 02 April 2025 
12:17:56 -0400 (0:00:00.035) 0:00:10.678 *******
skipping: [managed-node1] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Wednesday 02 April 2025 12:17:56 -0400 (0:00:00.039) 0:00:10.718 *******
skipping: [managed-node1] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Create host directories] **************
task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7
Wednesday 02 April 2025 12:17:56 -0400 (0:00:00.042) 0:00:10.760 *******

TASK [fedora.linux_system_roles.podman : Ensure container images are present] ***
task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
Wednesday 02 April 2025 12:17:56 -0400 (0:00:00.033) 0:00:10.793 *******
skipping: [managed-node1] => (item=None) => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}
skipping: [managed-node1] => {
    "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result",
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] ***
task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39
Wednesday 02 April 2025 12:17:56 -0400 (0:00:00.044) 0:00:10.838 *******
ok: [managed-node1] => {
    "changed": false,
    "gid": 0,
    "group": "root",
    "mode": "0755",
    "owner": "root",
    "path": "/etc/containers/systemd",
    "secontext": "system_u:object_r:etc_t:s0",
    "size": 6,
    "state": "directory",
    "uid": 0
}

TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ********
task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48
Wednesday 02 April 2025 12:17:57 -0400 (0:00:00.532) 0:00:11.370 *******
skipping: [managed-node1] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] ***
task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58
Wednesday 02 April 2025 12:17:57 -0400 (0:00:00.035) 0:00:11.406 *******
skipping: [managed-node1] => {
    "changed": false,
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] *******
task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70
Wednesday 02 April 2025 12:17:57 -0400 (0:00:00.087) 0:00:11.493 *******
changed: [managed-node1] => {
    "changed": true,
    "checksum": "670d64fc68a9768edb20cad26df2acc703542d85",
    "dest": "/etc/containers/systemd/nopull.container",
    "gid": 0,
    "group": "root",
    "md5sum": "cedb6667f6cd1b033fe06e2810fe6b19",
    "mode": "0644",
    "owner": "root",
    "secontext": "system_u:object_r:etc_t:s0",
    "size": 151,
    "src": "/root/.ansible/tmp/ansible-tmp-1743610677.2053769-12783-250826521273348/source",
    "state": "file",
    "uid": 0
}

TASK [fedora.linux_system_roles.podman : Reload systemctl] *********************
task path:
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Wednesday 02 April 2025 12:17:57 -0400 (0:00:00.791) 0:00:12.285 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Wednesday 02 April 2025 12:17:57 -0400 (0:00:00.042) 0:00:12.328 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Wednesday 02 April 2025 12:17:58 -0400 (0:00:00.043) 0:00:12.371 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:198 Wednesday 02 April 2025 12:17:58 -0400 (0:00:00.048) 0:00:12.420 ******* TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:204 Wednesday 02 April 2025 12:17:58 -0400 (0:00:00.035) 0:00:12.456 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:213 Wednesday 02 April 2025 12:17:58 -0400 (0:00:00.034) 0:00:12.491 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Verify image not pulled] ************************************************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:86 Wednesday 02 April 2025 12:17:58 -0400 (0:00:00.036) 0:00:12.527 ******* ok: [managed-node1] => { "changed": false } MSG: All assertions passed TASK [Run role - try to pull bogus image] ************************************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:90 Wednesday 02 April 2025 12:17:58 -0400 (0:00:00.038) 0:00:12.565 ******* TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Wednesday 02 April 2025 12:17:58 -0400 (0:00:00.088) 0:00:12.654 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Wednesday 02 April 2025 12:17:58 -0400 (0:00:00.059) 0:00:12.714 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Wednesday 02 April 2025 12:17:58 -0400 (0:00:00.048) 0:00:12.763 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Wednesday 02 April 2025 12:17:58 -0400 (0:00:00.101) 0:00:12.864 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Wednesday 02 April 2025 12:17:58 -0400 (0:00:00.038) 0:00:12.902 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Wednesday 02 April 2025 12:17:58 -0400 (0:00:00.038) 0:00:12.940 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Wednesday 02 April 2025 12:17:58 -0400 (0:00:00.035) 0:00:12.976 ******* ok: [managed-node1] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node1] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node1] => (item=CentOS_8.yml) => { "ansible_facts": { "__podman_packages": [ "crun", "podman", "podman-plugins", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } ok: [managed-node1] => (item=CentOS_8.yml) => { "ansible_facts": { "__podman_packages": [ "crun", "podman", "podman-plugins", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Wednesday 02 April 2025 12:17:58 -0400 (0:00:00.102) 0:00:13.078 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Wednesday 02 April 2025 12:18:00 -0400 (0:00:01.532) 0:00:14.611 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Wednesday 02 April 2025 12:18:00 -0400 (0:00:00.061) 0:00:14.672 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Wednesday 02 April 2025 12:18:00 -0400 (0:00:00.071) 0:00:14.744 ******* skipping: [managed-node1] => {} TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Wednesday 02 April 2025 12:18:00 -0400 (0:00:00.059) 0:00:14.803 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Wednesday 02 April 2025 12:18:00 -0400 (0:00:00.057) 0:00:14.861 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Wednesday 02 April 2025 12:18:00 -0400 (0:00:00.094) 0:00:14.956 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.027236", "end": "2025-04-02 12:18:00.952292", "rc": 0, "start": "2025-04-02 12:18:00.925056" } STDOUT: podman version 4.9.4-dev TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Wednesday 02 April 2025 12:18:01 -0400 (0:00:00.408) 0:00:15.365 ******* ok: [managed-node1] => { "ansible_facts": { "podman_version": "4.9.4-dev" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Wednesday 02 April 2025 12:18:01 -0400 (0:00:00.052) 0:00:15.417 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Wednesday 02 April 2025 12:18:01 -0400 (0:00:00.057) 0:00:15.475 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } META: end_host conditional evaluated to false, continuing execution for managed-node1 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Wednesday 02 April 2025 12:18:01 -0400 (0:00:00.106) 0:00:15.581 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } META: end_host conditional evaluated to false, continuing execution for managed-node1 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Wednesday 02 April 2025 12:18:01 -0400 (0:00:00.135) 0:00:15.717 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:18:01 -0400 (0:00:00.113) 0:00:15.831 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:18:01 -0400 (0:00:00.072) 0:00:15.903 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:18:01 -0400 (0:00:00.073) 0:00:15.976 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:18:01 -0400 (0:00:00.075) 0:00:16.052 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610609.2783134, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610533.3987217, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986653, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "4263604762", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:18:02 -0400 (0:00:00.435) 0:00:16.487 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:18:02 -0400 (0:00:00.061) 0:00:16.549 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:18:02 -0400 (0:00:00.060) 0:00:16.610 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:18:02 -0400 (0:00:00.117) 0:00:16.727 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:18:02 -0400 (0:00:00.061) 0:00:16.788 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:18:02 -0400 (0:00:00.062) 0:00:16.851 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:18:02 -0400 (0:00:00.061) 0:00:16.913 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:18:02 -0400 (0:00:00.073) 0:00:16.986 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Wednesday 02 April 2025 12:18:02 -0400 (0:00:00.050) 0:00:17.037 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_parent_mode": "0755", "__podman_parent_path": "/etc/containers", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:126 Wednesday 02 April 2025 12:18:02 -0400 (0:00:00.067) 0:00:17.104 ******* included: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Wednesday 02 April 2025 12:18:02 -0400 (0:00:00.084) 0:00:17.188 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Wednesday 02 April 2025 12:18:02 -0400 (0:00:00.038) 0:00:17.227 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:129 Wednesday 02 April 2025 12:18:02 -0400 (0:00:00.037) 0:00:17.265 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Wednesday 02 April 2025 12:18:02 -0400 (0:00:00.069) 0:00:17.335 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Wednesday 02 April 2025 12:18:03 -0400 (0:00:00.035) 0:00:17.370 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:132 Wednesday 02 April 2025 12:18:03 -0400 (0:00:00.036) 0:00:17.407 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:7 Wednesday 02 April 2025 12:18:03 -0400 (0:00:00.071) 0:00:17.478 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:15 Wednesday 02 April 2025 12:18:03 -0400 (0:00:00.038) 0:00:17.516 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:135 Wednesday 02 April 2025 12:18:03 -0400 (0:00:00.049) 0:00:17.565 ******* included: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:8 Wednesday 02 April 2025 12:18:03 -0400 (0:00:00.145) 0:00:17.711 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:16 Wednesday 02 April 2025 12:18:03 -0400 (0:00:00.035) 0:00:17.747 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:21 Wednesday 02 April 2025 12:18:03 -0400 (0:00:00.037) 0:00:17.784 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:27 Wednesday 02 April 2025 12:18:03 -0400 (0:00:00.037) 0:00:17.822 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:141 Wednesday 02 April 2025 12:18:03 -0400 (0:00:00.036) 0:00:17.858 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:148 Wednesday 02 April 2025 12:18:03 -0400 (0:00:00.037) 0:00:17.896 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:155 Wednesday 02 April 2025 12:18:03 -0400 (0:00:00.036) 0:00:17.933 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:159 Wednesday 02 April 2025 12:18:03 -0400 (0:00:00.036) 0:00:17.970 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:168 Wednesday 02 April 2025 12:18:03 -0400 (0:00:00.044) 0:00:18.014 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", 
"changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:177 Wednesday 02 April 2025 12:18:03 -0400 (0:00:00.035) 0:00:18.050 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:184 Wednesday 02 April 2025 12:18:03 -0400 (0:00:00.036) 0:00:18.087 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:191 Wednesday 02 April 2025 12:18:03 -0400 (0:00:00.033) 0:00:18.121 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Wednesday 02 April 2025 12:18:03 -0400 (0:00:00.088) 0:00:18.210 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Container": { "ContainerName": "bogus", "Image": "this_is_a_bogus_image" }, "Install": { "WantedBy": "default.target" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Wednesday 02 April 2025 12:18:03 -0400 (0:00:00.047) 0:00:18.257 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_continue_if_pull_fails": true, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Wednesday 02 April 2025 12:18:03 -0400 (0:00:00.046) 0:00:18.304 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Wednesday 02 April 2025 12:18:04 -0400 (0:00:00.036) 0:00:18.341 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_name": "bogus", "__podman_quadlet_type": "container", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Wednesday 02 April 2025 12:18:04 -0400 (0:00:00.104) 0:00:18.445 ******* included: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:18:04 -0400 (0:00:00.068) 0:00:18.513 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:18:04 -0400 (0:00:00.044) 0:00:18.558 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:18:04 -0400 (0:00:00.043) 0:00:18.601 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:18:04 -0400 (0:00:00.052) 0:00:18.654 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610609.2783134, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610533.3987217, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986653, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "4263604762", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:18:04 -0400 (0:00:00.384) 0:00:19.038 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:18:04 -0400 (0:00:00.038) 0:00:19.077 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:18:04 -0400 (0:00:00.040) 0:00:19.117 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get 
subuid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:18:04 -0400 (0:00:00.037) 0:00:19.155 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:18:04 -0400 (0:00:00.040) 0:00:19.195 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:18:04 -0400 (0:00:00.039) 0:00:19.234 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:18:04 -0400 (0:00:00.038) 0:00:19.273 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:18:04 -0400 (0:00:00.039) 0:00:19.312 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Wednesday 02 April 2025 12:18:05 -0400 (0:00:00.038) 0:00:19.350 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_activate_systemd_unit": false, "__podman_images_found": [ "this_is_a_bogus_image" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "bogus.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Wednesday 02 April 2025 12:18:05 -0400 (0:00:00.067) 0:00:19.418 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Wednesday 02 April 2025 12:18:05 -0400 (0:00:00.040) 0:00:19.459 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Wednesday 02 April 2025 12:18:05 -0400 (0:00:00.091) 0:00:19.550 ******* ok: [managed-node1] => { "ansible_facts": { 
"__podman_images": [ "this_is_a_bogus_image" ], "__podman_quadlet_file": "/etc/containers/systemd/bogus.container", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Wednesday 02 April 2025 12:18:05 -0400 (0:00:00.092) 0:00:19.642 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Wednesday 02 April 2025 12:18:05 -0400 (0:00:00.042) 0:00:19.685 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Wednesday 02 April 2025 12:18:05 -0400 (0:00:00.037) 0:00:19.722 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Wednesday 02 April 2025 12:18:05 -0400 (0:00:00.084) 0:00:19.806 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Wednesday 02 April 2025 12:18:05 -0400 (0:00:00.088) 0:00:19.895 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Wednesday 02 April 2025 12:18:05 -0400 (0:00:00.059) 0:00:19.954 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Wednesday 02 April 2025 12:18:05 -0400 (0:00:00.060) 0:00:20.015 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Wednesday 02 April 2025 12:18:05 -0400 (0:00:00.054) 0:00:20.069 ******* TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Wednesday 02 April 2025 12:18:05 -0400 (0:00:00.045) 0:00:20.115 ******* ok: [managed-node1] => (item=None) => { "attempts": 1, 
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Wednesday 02 April 2025 12:18:06 -0400 (0:00:01.026) 0:00:21.141 ******* ok: [managed-node1] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 30, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Wednesday 02 April 2025 12:18:07 -0400 (0:00:00.380) 0:00:21.522 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Wednesday 02 April 2025 12:18:07 -0400 (0:00:00.036) 0:00:21.559 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Wednesday 02 April 2025 12:18:07 -0400 (0:00:00.037) 0:00:21.596 ******* changed: [managed-node1] => { "changed": true, "checksum": "1d087e679d135214e8ac9ccaf33b2222916efb7f", "dest": "/etc/containers/systemd/bogus.container", "gid": 0, "group": "root", "md5sum": "97480a9a73734d9f8007d2c06e7fed1f", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 138, "src": "/root/.ansible/tmp/ansible-tmp-1743610687.3076878-13176-193841512605995/source", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Wednesday 02 April 2025 12:18:07 -0400 (0:00:00.709) 0:00:22.305 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Wednesday 02 April 2025 12:18:08 -0400 (0:00:00.041) 0:00:22.347 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Wednesday 02 April 2025 12:18:08 -0400 (0:00:00.043) 0:00:22.390 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:198 Wednesday 02 April 2025 12:18:08 -0400 (0:00:00.046) 0:00:22.436 ******* TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:204 Wednesday 02 April 2025 12:18:08 -0400 (0:00:00.033) 0:00:22.470 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:213 Wednesday 02 April 2025 12:18:08 -0400 (0:00:00.035) 0:00:22.506 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Verify image not pulled and no error] ************************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:106 Wednesday 02 April 2025 12:18:08 -0400 (0:00:00.034) 0:00:22.540 ******* ok: [managed-node1] => { "changed": false } MSG: All assertions passed TASK [Cleanup] ***************************************************************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:113 Wednesday 02 April 2025 12:18:08 -0400 (0:00:00.041) 0:00:22.582 ******* TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Wednesday 02 April 2025 12:18:08 -0400 (0:00:00.131) 0:00:22.713 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Wednesday 02 April 2025 12:18:08 -0400 (0:00:00.153) 0:00:22.867 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Wednesday 02 April 2025 12:18:08 -0400 (0:00:00.071) 0:00:22.938 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Wednesday 02 April 2025 12:18:08 -0400 (0:00:00.061) 0:00:23.000 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Wednesday 02 April 2025 12:18:08 -0400 (0:00:00.055) 0:00:23.055 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if 
transactional-update exists] ***
task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28
Wednesday 02 April 2025 12:18:08 -0400 (0:00:00.059) 0:00:23.115 *******
skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32
Wednesday 02 April 2025 12:18:08 -0400 (0:00:00.073) 0:00:23.189 *******
[WARNING]: The loop variable 'item' is already in use. You should set the `loop_var` value in the `loop_control` option for the task to something else to avoid variable collisions and unexpected behavior.
ok: [managed-node1] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" }
skipping: [managed-node1] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" }
ok: [managed-node1] => (item=CentOS_8.yml) => { "ansible_facts": { "__podman_packages": [ "crun", "podman", "podman-plugins", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" }
ok: [managed-node1] => (item=CentOS_8.yml) => { "ansible_facts": { "__podman_packages": [ "crun", "podman", "podman-plugins", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" }

TASK [fedora.linux_system_roles.podman : Gather the package facts] *************
task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
Wednesday 02 April 2025 12:18:08 -0400 (0:00:00.113) 0:00:23.302 *******
ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : Enable copr if requested] *************
task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10
Wednesday 02 April 2025 12:18:10 -0400 (0:00:01.463) 0:00:24.766 *******
skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] ***
task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14
Wednesday 02 April 2025 12:18:10 -0400 (0:00:00.036) 0:00:24.802 *******
skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] ***
task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28
Wednesday 02 April 2025 12:18:10 -0400 (0:00:00.117) 0:00:24.919 *******
skipping: [managed-node1] => {}
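
The [WARNING] above is emitted because the vars-file loop still uses the default loop variable 'item' while an outer loop already owns that name. The remedy the warning points at is loop_control with loop_var on the inner task; a minimal illustration follows (the variable names are made up for this sketch, since the role's actual task text is not shown in this excerpt):

    - name: Set platform/version specific variables
      ansible.builtin.include_vars: "{{ __podman_vars_file }}"
      loop: "{{ __podman_vars_file_candidates }}"   # hypothetical list of candidate vars files
      loop_control:
        loop_var: __podman_vars_file                # renames the loop variable, avoiding the clash with 'item'

With loop_var set, the inner loop no longer shadows the caller's 'item', and results such as "ansible_loop_var": "item" in the output above would report the renamed variable instead.

TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: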
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Wednesday 02 April 2025 12:18:10 -0400 (0:00:00.037) 0:00:24.956 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Wednesday 02 April 2025 12:18:10 -0400 (0:00:00.035) 0:00:24.992 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Wednesday 02 April 2025 12:18:10 -0400 (0:00:00.036) 0:00:25.029 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.027163", "end": "2025-04-02 12:18:11.022996", "rc": 0, "start": "2025-04-02 12:18:10.995833" } STDOUT: podman version 4.9.4-dev TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Wednesday 02 April 2025 12:18:11 -0400 (0:00:00.402) 0:00:25.431 ******* ok: [managed-node1] => { "ansible_facts": { "podman_version": "4.9.4-dev" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Wednesday 02 April 2025 12:18:11 -0400 (0:00:00.048) 0:00:25.480 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Wednesday 02 April 2025 12:18:11 -0400 (0:00:00.058) 0:00:25.538 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } META: end_host conditional evaluated to false, continuing execution for managed-node1 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Wednesday 02 April 2025 12:18:11 -0400 (0:00:00.100) 0:00:25.638 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } META: end_host conditional evaluated to false, continuing execution for managed-node1 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Wednesday 02 April 2025 12:18:11 -0400 (0:00:00.103) 0:00:25.742 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:18:11 -0400 (0:00:00.078) 0:00:25.821 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was 
False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:18:11 -0400 (0:00:00.045) 0:00:25.866 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:18:11 -0400 (0:00:00.045) 0:00:25.911 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:18:11 -0400 (0:00:00.051) 0:00:25.962 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610609.2783134, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610533.3987217, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986653, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "4263604762", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:18:12 -0400 (0:00:00.374) 0:00:26.337 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:18:12 -0400 (0:00:00.039) 0:00:26.376 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:18:12 -0400 (0:00:00.039) 0:00:26.416 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:18:12 -0400 (0:00:00.040) 0:00:26.457 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:18:12 -0400 (0:00:00.039) 0:00:26.496 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:18:12 -0400 (0:00:00.039) 0:00:26.536 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:18:12 -0400 (0:00:00.040) 0:00:26.576 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:18:12 -0400 (0:00:00.038) 0:00:26.615 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Wednesday 02 April 2025 12:18:12 -0400 (0:00:00.039) 0:00:26.655 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_parent_mode": "0755", "__podman_parent_path": "/etc/containers", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:126 Wednesday 02 April 2025 12:18:12 -0400 (0:00:00.051) 0:00:26.706 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Wednesday 02 April 2025 12:18:12 -0400 (0:00:00.066) 0:00:26.773 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Wednesday 02 April 2025 12:18:12 -0400 (0:00:00.037) 0:00:26.810 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:129 Wednesday 02 April 2025 12:18:12 -0400 (0:00:00.036) 0:00:26.847 ******* included: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Wednesday 02 April 2025 12:18:12 -0400 (0:00:00.071) 0:00:26.918 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Wednesday 02 April 2025 12:18:12 -0400 (0:00:00.037) 0:00:26.955 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:132 Wednesday 02 April 2025 12:18:12 -0400 (0:00:00.091) 0:00:27.046 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:7 Wednesday 02 April 2025 12:18:12 -0400 (0:00:00.073) 0:00:27.120 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:15 Wednesday 02 April 2025 12:18:12 -0400 (0:00:00.036) 0:00:27.156 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:135 Wednesday 02 April 2025 12:18:12 -0400 (0:00:00.038) 0:00:27.195 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:8 Wednesday 02 April 2025 12:18:12 -0400 (0:00:00.085) 0:00:27.281 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:16 Wednesday 02 April 2025 12:18:12 -0400 (0:00:00.037) 0:00:27.318 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:21 Wednesday 02 April 2025 12:18:13 -0400 (0:00:00.037) 0:00:27.356 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional 
result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:27 Wednesday 02 April 2025 12:18:13 -0400 (0:00:00.036) 0:00:27.392 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:141 Wednesday 02 April 2025 12:18:13 -0400 (0:00:00.037) 0:00:27.429 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:148 Wednesday 02 April 2025 12:18:13 -0400 (0:00:00.035) 0:00:27.465 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:155 Wednesday 02 April 2025 12:18:13 -0400 (0:00:00.037) 0:00:27.503 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:159 Wednesday 02 April 2025 12:18:13 -0400 (0:00:00.034) 0:00:27.538 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:168 Wednesday 02 April 2025 12:18:13 -0400 (0:00:00.035) 0:00:27.573 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:177 Wednesday 02 April 2025 12:18:13 -0400 (0:00:00.034) 0:00:27.608 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:184 Wednesday 02 April 2025 12:18:13 -0400 (0:00:00.033) 0:00:27.642 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:191 Wednesday 02 April 2025 12:18:13 -0400 (0:00:00.034) 0:00:27.676 ******* included: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Wednesday 02 April 2025 12:18:13 -0400 (0:00:00.143) 0:00:27.820 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Wednesday 02 April 2025 12:18:13 -0400 (0:00:00.047) 0:00:27.867 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Wednesday 02 April 2025 12:18:13 -0400 (0:00:00.045) 0:00:27.912 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Wednesday 02 April 2025 12:18:13 -0400 (0:00:00.045) 0:00:27.958 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_name": "nopull", "__podman_quadlet_type": "container", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Wednesday 02 April 2025 12:18:13 -0400 (0:00:00.054) 0:00:28.012 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:18:13 -0400 (0:00:00.066) 0:00:28.079 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:18:13 -0400 (0:00:00.044) 0:00:28.124 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:18:13 -0400 (0:00:00.044) 0:00:28.168 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:18:13 -0400 (0:00:00.050) 0:00:28.219 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610609.2783134, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610533.3987217, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986653, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "4263604762", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:18:14 -0400 (0:00:00.379) 0:00:28.599 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:18:14 -0400 (0:00:00.039) 0:00:28.638 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:18:14 -0400 (0:00:00.038) 0:00:28.676 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:18:14 -0400 (0:00:00.038) 0:00:28.714 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:18:14 -0400 (0:00:00.039) 0:00:28.754 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:18:14 -0400 (0:00:00.039) 0:00:28.794 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:18:14 -0400 
(0:00:00.125) 0:00:28.919 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:18:14 -0400 (0:00:00.062) 0:00:28.982 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Wednesday 02 April 2025 12:18:14 -0400 (0:00:00.063) 0:00:29.045 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "nopull.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Wednesday 02 April 2025 12:18:14 -0400 (0:00:00.101) 0:00:29.146 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Wednesday 02 April 2025 12:18:14 -0400 (0:00:00.064) 0:00:29.211 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Wednesday 02 April 2025 12:18:14 -0400 (0:00:00.061) 0:00:29.272 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/nopull.container", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Wednesday 02 April 2025 12:18:15 -0400 (0:00:00.144) 0:00:29.416 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Wednesday 02 April 2025 12:18:15 -0400 (0:00:00.069) 0:00:29.486 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Wednesday 02 April 2025 12:18:15 -0400 (0:00:00.140) 0:00:29.626 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" 
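}

The cleanup pass runs the same role against the "nopull" spec with __podman_state "absent", which routes into cleanup_quadlet_spec.yml: the tasks that follow stop and disable nopull.service (harmless here, since the unit was never generated into systemd), parse the existing quadlet file, and remove it. A hedged sketch of what such a cleanup invocation looks like as role input (task name and per-spec key names inferred from the __podman_* facts above, not quoted from the test):

    - name: Clean up the nopull quadlet spec   # illustrative task name
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.podman
      vars:
        podman_quadlet_specs:
          - name: nopull
            type: container
            state: absent    # stop/disable the unit and remove the quadlet file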
} TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Wednesday 02 April 2025 12:18:15 -0400 (0:00:00.058) 0:00:29.684 ******* ok: [managed-node1] => { "changed": false, "failed_when_result": false } MSG: Could not find the requested service nopull.service: host TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Wednesday 02 April 2025 12:18:16 -0400 (0:00:00.892) 0:00:30.577 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610677.8678715, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "670d64fc68a9768edb20cad26df2acc703542d85", "ctime": 1743610677.8718717, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 337641617, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1743610677.519868, "nlink": 1, "path": "/etc/containers/systemd/nopull.container", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 151, "uid": 0, "version": "298782784", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Wednesday 02 April 2025 12:18:16 -0400 (0:00:00.397) 0:00:30.975 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Wednesday 02 April 2025 12:18:16 -0400 (0:00:00.081) 0:00:31.056 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Wednesday 02 April 2025 12:18:17 -0400 (0:00:00.573) 0:00:31.630 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Wednesday 02 April 2025 12:18:17 -0400 (0:00:00.056) 0:00:31.686 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Wednesday 02 April 2025 12:18:17 -0400 (0:00:00.050) 
0:00:31.737 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Wednesday 02 April 2025 12:18:17 -0400 (0:00:00.060) 0:00:31.797 ******* changed: [managed-node1] => { "changed": true, "path": "/etc/containers/systemd/nopull.container", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Wednesday 02 April 2025 12:18:17 -0400 (0:00:00.419) 0:00:32.216 ******* ok: [managed-node1] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Wednesday 02 April 2025 12:18:18 -0400 (0:00:00.693) 0:00:32.910 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Wednesday 02 April 2025 12:18:19 -0400 (0:00:00.459) 0:00:33.369 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Wednesday 02 April 2025 12:18:19 -0400 (0:00:00.084) 0:00:33.454 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Wednesday 02 April 2025 12:18:19 -0400 (0:00:00.071) 0:00:33.525 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Wednesday 02 April 2025 12:18:19 -0400 (0:00:00.060) 0:00:33.586 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Wednesday 02 April 2025 12:18:19 -0400 (0:00:00.107) 0:00:33.694 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Wednesday 02 April 2025 12:18:19 -0400 (0:00:00.129) 0:00:33.823 ******* 
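For reference, the cleanup steps recorded just above (removing /etc/containers/systemd/nopull.container and then refreshing systemd) could be written as tasks roughly like the following. This is a minimal illustrative sketch, not the role's actual source; the module choices (ansible.builtin.file, ansible.builtin.systemd) are assumptions, while the file path and the daemon reload are taken from the log output above.

    # Sketch only (assumed modules, not the role's verbatim tasks):
    # drop the quadlet unit file, then reload systemd so the generated
    # service definition disappears.
    - name: Remove quadlet file (sketch)
      ansible.builtin.file:
        path: /etc/containers/systemd/nopull.container   # path reported in the log
        state: absent

    - name: Refresh systemd (sketch)
      ansible.builtin.systemd:
        daemon_reload: true   # consistent with the reload result above ("name": null, "status": {})

A bare daemon_reload call like this returns no unit name or status, which matches the "Refresh systemd" record shown earlier in the cleanup sequence.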
skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Wednesday 02 April 2025 12:18:19 -0400 (0:00:00.056) 0:00:33.880 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Wednesday 02 April 2025 12:18:19 -0400 (0:00:00.046) 0:00:33.927 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Wednesday 02 April 2025 12:18:19 -0400 (0:00:00.048) 0:00:33.976 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Wednesday 02 April 2025 12:18:19 -0400 (0:00:00.050) 0:00:34.026 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Wednesday 02 April 2025 12:18:19 -0400 (0:00:00.044) 0:00:34.071 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Wednesday 02 April 2025 12:18:19 -0400 (0:00:00.038) 0:00:34.109 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Wednesday 02 April 2025 12:18:19 -0400 (0:00:00.039) 0:00:34.149 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Wednesday 02 April 2025 12:18:19 -0400 (0:00:00.037) 0:00:34.186 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Wednesday 02 April 2025 12:18:19 -0400 (0:00:00.040) 0:00:34.226 ******* skipping: [managed-node1] => { 
"changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:198 Wednesday 02 April 2025 12:18:19 -0400 (0:00:00.043) 0:00:34.270 ******* TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:204 Wednesday 02 April 2025 12:18:19 -0400 (0:00:00.046) 0:00:34.316 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:213 Wednesday 02 April 2025 12:18:20 -0400 (0:00:00.057) 0:00:34.374 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Wednesday 02 April 2025 12:18:20 -0400 (0:00:00.054) 0:00:34.428 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Wednesday 02 April 2025 12:18:20 -0400 (0:00:00.112) 0:00:34.541 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Wednesday 02 April 2025 12:18:20 -0400 (0:00:00.068) 0:00:34.610 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Wednesday 02 April 2025 12:18:20 -0400 (0:00:00.059) 0:00:34.669 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Wednesday 02 April 2025 12:18:20 -0400 (0:00:00.042) 0:00:34.712 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Wednesday 02 April 2025 12:18:20 -0400 (0:00:00.110) 0:00:34.823 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Wednesday 02 April 2025 12:18:20 -0400 (0:00:00.038) 0:00:34.862 ******* [WARNING]: The loop variable 'item' is already in use. You should set the `loop_var` value in the `loop_control` option for the task to something else to avoid variable collisions and unexpected behavior. ok: [managed-node1] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node1] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node1] => (item=CentOS_8.yml) => { "ansible_facts": { "__podman_packages": [ "crun", "podman", "podman-plugins", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } ok: [managed-node1] => (item=CentOS_8.yml) => { "ansible_facts": { "__podman_packages": [ "crun", "podman", "podman-plugins", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Wednesday 02 April 2025 12:18:20 -0400 (0:00:00.096) 0:00:34.958 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Wednesday 02 April 2025 12:18:22 -0400 (0:00:01.475) 0:00:36.433 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Wednesday 02 April 2025 12:18:22 -0400 (0:00:00.038) 0:00:36.472 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Wednesday 02 April 2025 12:18:22 -0400 (0:00:00.043) 0:00:36.515 ******* skipping: [managed-node1] => {} TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Wednesday 02 April 2025 12:18:22 -0400 (0:00:00.036) 0:00:36.552 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Wednesday 02 April 2025 12:18:22 -0400 (0:00:00.044) 0:00:36.597 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Wednesday 02 April 2025 12:18:22 -0400 (0:00:00.036) 0:00:36.633 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.029158", "end": "2025-04-02 12:18:22.620676", "rc": 0, "start": "2025-04-02 12:18:22.591518" } STDOUT: podman version 4.9.4-dev TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Wednesday 02 April 2025 12:18:22 -0400 (0:00:00.394) 0:00:37.027 ******* ok: [managed-node1] => { "ansible_facts": { "podman_version": "4.9.4-dev" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Wednesday 02 April 2025 12:18:22 -0400 (0:00:00.038) 0:00:37.066 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Wednesday 02 April 2025 12:18:22 -0400 (0:00:00.037) 0:00:37.103 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } META: end_host conditional evaluated to false, continuing execution for managed-node1 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Wednesday 02 April 2025 12:18:22 -0400 (0:00:00.065) 0:00:37.169 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } META: end_host conditional evaluated to false, continuing execution for managed-node1 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Wednesday 02 April 2025 12:18:22 -0400 (0:00:00.088) 0:00:37.257 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:18:22 -0400 (0:00:00.068) 0:00:37.325 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:18:23 -0400 (0:00:00.045) 0:00:37.371 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional 
result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:18:23 -0400 (0:00:00.044) 0:00:37.415 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:18:23 -0400 (0:00:00.050) 0:00:37.465 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610609.2783134, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610533.3987217, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986653, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "4263604762", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:18:23 -0400 (0:00:00.380) 0:00:37.846 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:18:23 -0400 (0:00:00.037) 0:00:37.883 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:18:23 -0400 (0:00:00.040) 0:00:37.923 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:18:23 -0400 (0:00:00.038) 0:00:37.962 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:18:23 -0400 (0:00:00.088) 0:00:38.051 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:18:23 -0400 (0:00:00.038) 0:00:38.090 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:18:23 -0400 (0:00:00.039) 0:00:38.130 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:18:23 -0400 (0:00:00.039) 0:00:38.170 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Wednesday 02 April 2025 12:18:23 -0400 (0:00:00.037) 0:00:38.207 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_parent_mode": "0755", "__podman_parent_path": "/etc/containers", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:126 Wednesday 02 April 2025 12:18:23 -0400 (0:00:00.052) 0:00:38.260 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Wednesday 02 April 2025 12:18:23 -0400 (0:00:00.067) 0:00:38.327 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Wednesday 02 April 2025 12:18:24 -0400 (0:00:00.037) 0:00:38.364 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:129 Wednesday 02 April 2025 12:18:24 -0400 (0:00:00.036) 0:00:38.401 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Wednesday 02 April 2025 12:18:24 -0400 
(0:00:00.072) 0:00:38.474 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Wednesday 02 April 2025 12:18:24 -0400 (0:00:00.036) 0:00:38.510 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:132 Wednesday 02 April 2025 12:18:24 -0400 (0:00:00.038) 0:00:38.549 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:7 Wednesday 02 April 2025 12:18:24 -0400 (0:00:00.082) 0:00:38.631 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:15 Wednesday 02 April 2025 12:18:24 -0400 (0:00:00.037) 0:00:38.668 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:135 Wednesday 02 April 2025 12:18:24 -0400 (0:00:00.036) 0:00:38.704 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:8 Wednesday 02 April 2025 12:18:24 -0400 (0:00:00.129) 0:00:38.834 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:16 Wednesday 02 April 2025 12:18:24 -0400 (0:00:00.036) 0:00:38.870 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:21 Wednesday 02 April 2025 12:18:24 -0400 (0:00:00.037) 0:00:38.908 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:27 Wednesday 02 April 2025 12:18:24 -0400 (0:00:00.036) 0:00:38.945 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was 
False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:141 Wednesday 02 April 2025 12:18:24 -0400 (0:00:00.038) 0:00:38.983 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:148 Wednesday 02 April 2025 12:18:24 -0400 (0:00:00.036) 0:00:39.020 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:155 Wednesday 02 April 2025 12:18:24 -0400 (0:00:00.038) 0:00:39.059 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:159 Wednesday 02 April 2025 12:18:24 -0400 (0:00:00.036) 0:00:39.095 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:168 Wednesday 02 April 2025 12:18:24 -0400 (0:00:00.034) 0:00:39.130 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:177 Wednesday 02 April 2025 12:18:24 -0400 (0:00:00.035) 0:00:39.166 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:184 Wednesday 02 April 2025 12:18:24 -0400 (0:00:00.034) 0:00:39.201 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:191 Wednesday 02 April 2025 12:18:24 -0400 (0:00:00.036) 0:00:39.237 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Wednesday 02 April 2025 12:18:24 -0400 (0:00:00.094) 0:00:39.331 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_file_src": "", 
"__podman_quadlet_spec": {}, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Wednesday 02 April 2025 12:18:25 -0400 (0:00:00.063) 0:00:39.394 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Wednesday 02 April 2025 12:18:25 -0400 (0:00:00.070) 0:00:39.465 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Wednesday 02 April 2025 12:18:25 -0400 (0:00:00.048) 0:00:39.513 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_name": "bogus", "__podman_quadlet_type": "container", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Wednesday 02 April 2025 12:18:25 -0400 (0:00:00.131) 0:00:39.645 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:18:25 -0400 (0:00:00.073) 0:00:39.718 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:18:25 -0400 (0:00:00.045) 0:00:39.764 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:18:25 -0400 (0:00:00.050) 0:00:39.814 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:18:25 -0400 (0:00:00.078) 0:00:39.892 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610609.2783134, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610533.3987217, "dev": 51713, "device_type": 0, "executable": true, "exists": 
true, "gid": 0, "gr_name": "root", "inode": 6986653, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "4263604762", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:18:25 -0400 (0:00:00.423) 0:00:40.316 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:18:26 -0400 (0:00:00.062) 0:00:40.378 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:18:26 -0400 (0:00:00.046) 0:00:40.425 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:18:26 -0400 (0:00:00.048) 0:00:40.474 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:18:26 -0400 (0:00:00.050) 0:00:40.524 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:18:26 -0400 (0:00:00.047) 0:00:40.571 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:18:26 -0400 (0:00:00.039) 0:00:40.611 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:18:26 -0400 (0:00:00.059) 0:00:40.671 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Wednesday 02 April 2025 12:18:26 -0400 (0:00:00.066) 0:00:40.737 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "bogus.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Wednesday 02 April 2025 12:18:26 -0400 (0:00:00.073) 0:00:40.811 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Wednesday 02 April 2025 12:18:26 -0400 (0:00:00.050) 0:00:40.861 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Wednesday 02 April 2025 12:18:26 -0400 (0:00:00.102) 0:00:40.964 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/bogus.container", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Wednesday 02 April 2025 12:18:26 -0400 (0:00:00.090) 0:00:41.055 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Wednesday 02 April 2025 12:18:26 -0400 (0:00:00.050) 0:00:41.105 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Wednesday 02 April 2025 12:18:26 -0400 (0:00:00.124) 0:00:41.229 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Wednesday 02 April 2025 12:18:26 -0400 (0:00:00.044) 0:00:41.273 ******* changed: [managed-node1] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "bogus.service", "state": "stopped", "status": { "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", 
"ActiveState": "inactive", "After": "basic.target sysinit.target system.slice -.mount systemd-journald.socket", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "yes", "DelegateControllers": "cpu cpuacct cpuset io blkio memory devices pids", "Description": "bogus.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "Environment": "PODMAN_SYSTEMD_UNIT=bogus.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name=bogus --cidfile=/run/bogus.cid --replace --rm --cgroups=split --sdnotify=conmon -d this_is_a_bogus_image ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/bogus.cid ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/bogus.cid ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/run/systemd/generator/bogus.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "bogus.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", 
"LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "MemoryAccounting": "yes", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "bogus.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "sysinit.target system.slice -.mount", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/bogus.container", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogIdentifier": "bogus", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22405", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : 
See if quadlet file exists] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Wednesday 02 April 2025 12:18:27 -0400 (0:00:00.667) 0:00:41.941 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610698.408089, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "1d087e679d135214e8ac9ccaf33b2222916efb7f", "ctime": 1743610687.8919778, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 377487491, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1743610687.628975, "nlink": 1, "path": "/etc/containers/systemd/bogus.container", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 138, "uid": 0, "version": "240087160", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Wednesday 02 April 2025 12:18:27 -0400 (0:00:00.375) 0:00:42.316 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Wednesday 02 April 2025 12:18:28 -0400 (0:00:00.067) 0:00:42.383 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Wednesday 02 April 2025 12:18:28 -0400 (0:00:00.383) 0:00:42.767 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Wednesday 02 April 2025 12:18:28 -0400 (0:00:00.054) 0:00:42.822 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Wednesday 02 April 2025 12:18:28 -0400 (0:00:00.037) 0:00:42.859 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Wednesday 02 April 2025 12:18:28 -0400 (0:00:00.095) 0:00:42.954 ******* changed: [managed-node1] => { "changed": true, "path": 
"/etc/containers/systemd/bogus.container", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Wednesday 02 April 2025 12:18:29 -0400 (0:00:00.397) 0:00:43.352 ******* ok: [managed-node1] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Wednesday 02 April 2025 12:18:29 -0400 (0:00:00.619) 0:00:43.971 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Wednesday 02 April 2025 12:18:30 -0400 (0:00:00.429) 0:00:44.401 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Wednesday 02 April 2025 12:18:30 -0400 (0:00:00.054) 0:00:44.456 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Wednesday 02 April 2025 12:18:30 -0400 (0:00:00.035) 0:00:44.491 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Wednesday 02 April 2025 12:18:30 -0400 (0:00:00.040) 0:00:44.532 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Wednesday 02 April 2025 12:18:30 -0400 (0:00:00.065) 0:00:44.597 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Wednesday 02 April 2025 12:18:30 -0400 (0:00:00.037) 0:00:44.634 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Wednesday 02 April 2025 12:18:30 -0400 (0:00:00.036) 0:00:44.671 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional 
result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Wednesday 02 April 2025 12:18:30 -0400 (0:00:00.037) 0:00:44.709 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Wednesday 02 April 2025 12:18:30 -0400 (0:00:00.039) 0:00:44.748 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Wednesday 02 April 2025 12:18:30 -0400 (0:00:00.106) 0:00:44.854 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Wednesday 02 April 2025 12:18:30 -0400 (0:00:00.040) 0:00:44.894 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Wednesday 02 April 2025 12:18:30 -0400 (0:00:00.039) 0:00:44.934 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Wednesday 02 April 2025 12:18:30 -0400 (0:00:00.041) 0:00:44.975 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Wednesday 02 April 2025 12:18:30 -0400 (0:00:00.039) 0:00:45.014 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Wednesday 02 April 2025 12:18:30 -0400 (0:00:00.040) 0:00:45.054 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:198 Wednesday 02 April 2025 12:18:30 -0400 (0:00:00.036) 0:00:45.091 ******* TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:204 Wednesday 02 April 2025 12:18:30 -0400 (0:00:00.035) 0:00:45.126 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:213 Wednesday 02 April 2025 12:18:30 -0400 (0:00:00.041) 0:00:45.167 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Create user for testing] ************************************************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:125 Wednesday 02 April 2025 12:18:30 -0400 (0:00:00.038) 0:00:45.206 ******* changed: [managed-node1] => { "changed": true, "comment": "", "create_home": true, "group": 1111, "home": "/home/user_quadlet_basic", "name": "user_quadlet_basic", "shell": "/bin/bash", "state": "present", "system": false, "uid": 1111 } TASK [Get local machine ID] **************************************************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:138 Wednesday 02 April 2025 12:18:31 -0400 (0:00:00.878) 0:00:46.085 ******* ok: [managed-node1 -> localhost] => { "changed": false, "content": "NmRlMjA1MjRkNGJhNDkwOGJmNGZkNjVjMjEzZTc1M2UK", "encoding": "base64", "source": "/etc/machine-id" } META: end_host conditional evaluated to false, continuing execution for managed-node1 TASK [Enable cgroup controllers] *********************************************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:150 Wednesday 02 April 2025 12:18:32 -0400 (0:00:00.483) 0:00:46.568 ******* changed: [managed-node1] => { "changed": true, "cmd": "set -euxo pipefail\ncat > /etc/systemd/system/user-0.slice < /etc/systemd/system/user@.service.d/delegate.conf < /etc/systemd/system/user-.slice.d/override.conf < { "changed": true, "cmd": [ "grubby", "--update-kernel=ALL", "--args=systemd.unified_cgroup_hierarchy=1" ], "delta": "0:00:00.545940", "end": "2025-04-02 12:18:33.817016", "rc": 0, "start": "2025-04-02 12:18:33.271076" } TASK [Reboot] ****************************************************************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:188 Wednesday 02 April 2025 12:18:33 -0400 (0:00:01.118) 0:00:48.243 ******* changed: [managed-node1] => { "changed": true, "elapsed": 39, "rebooted": true } TASK [Run the role - user] ***************************************************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:191 Wednesday 02 April 2025 12:19:14 -0400 (0:00:40.312) 0:01:28.556 ******* TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Wednesday 02 April 2025 12:19:14 -0400 (0:00:00.151) 0:01:28.708 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node1 TASK 
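
Note: the "Enable cgroup controllers" command above writes systemd drop-ins under /etc/systemd/system for the test user and then switches the kernel to the unified cgroup hierarchy before the reboot; the heredoc bodies are not visible in this output. A sketch of the usual rootless-podman delegation setup (the Delegate= content is an assumption, only the grubby invocation is taken verbatim from this run) is:

    mkdir -p /etc/systemd/system/user@.service.d
    cat > /etc/systemd/system/user@.service.d/delegate.conf <<'EOF'
    [Service]
    Delegate=memory pids cpu cpuset
    EOF
    systemctl daemon-reload
    grubby --update-kernel=ALL --args=systemd.unified_cgroup_hierarchy=1   # recorded above
    reboot                                                                 # "Reboot" task above
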
[fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Wednesday 02 April 2025 12:19:14 -0400 (0:00:00.175) 0:01:28.883 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Wednesday 02 April 2025 12:19:14 -0400 (0:00:00.070) 0:01:28.953 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Wednesday 02 April 2025 12:19:14 -0400 (0:00:00.045) 0:01:28.998 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Wednesday 02 April 2025 12:19:14 -0400 (0:00:00.044) 0:01:29.043 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Wednesday 02 April 2025 12:19:14 -0400 (0:00:00.053) 0:01:29.096 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Wednesday 02 April 2025 12:19:14 -0400 (0:00:00.045) 0:01:29.141 ******* ok: [managed-node1] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node1] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node1] => (item=CentOS_8.yml) => { "ansible_facts": { "__podman_packages": [ "crun", "podman", "podman-plugins", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } ok: [managed-node1] => (item=CentOS_8.yml) => { "ansible_facts": { "__podman_packages": [ "crun", "podman", "podman-plugins", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Wednesday 02 April 2025 12:19:14 -0400 
(0:00:00.099) 0:01:29.241 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Wednesday 02 April 2025 12:19:17 -0400 (0:00:02.196) 0:01:31.437 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Wednesday 02 April 2025 12:19:17 -0400 (0:00:00.041) 0:01:31.479 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Wednesday 02 April 2025 12:19:17 -0400 (0:00:00.055) 0:01:31.534 ******* skipping: [managed-node1] => {} TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Wednesday 02 April 2025 12:19:17 -0400 (0:00:00.043) 0:01:31.578 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Wednesday 02 April 2025 12:19:17 -0400 (0:00:00.041) 0:01:31.620 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Wednesday 02 April 2025 12:19:17 -0400 (0:00:00.048) 0:01:31.668 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.077271", "end": "2025-04-02 12:19:17.705860", "rc": 0, "start": "2025-04-02 12:19:17.628589" } STDOUT: podman version 4.9.4-dev TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Wednesday 02 April 2025 12:19:17 -0400 (0:00:00.449) 0:01:32.118 ******* ok: [managed-node1] => { "ansible_facts": { "podman_version": "4.9.4-dev" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Wednesday 02 April 2025 12:19:17 -0400 (0:00:00.040) 0:01:32.159 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Wednesday 02 April 2025 12:19:17 -0400 (0:00:00.036) 0:01:32.195 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } META: 
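
Note: the version checks above gate the role's features on the podman client version reported by "podman --version" (4.2 as the general minimum, 4.4 for quadlet and secret support). A rough manual equivalent of that gate, shown as a sketch rather than the role's actual implementation, is:

    ver=$(podman --version | awk '{print $3}')   # "4.9.4-dev" on this host, per the output above
    if [ "$(printf '%s\n' 4.4 "$ver" | sort -V | head -n1)" = "4.4" ]; then
        echo "quadlet and secret support available ($ver >= 4.4)"
    fi
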
end_host conditional evaluated to false, continuing execution for managed-node1 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Wednesday 02 April 2025 12:19:17 -0400 (0:00:00.139) 0:01:32.334 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } META: end_host conditional evaluated to false, continuing execution for managed-node1 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Wednesday 02 April 2025 12:19:18 -0400 (0:00:00.136) 0:01:32.471 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:19:18 -0400 (0:00:00.070) 0:01:32.541 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:19:18 -0400 (0:00:00.046) 0:01:32.587 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:19:18 -0400 (0:00:00.044) 0:01:32.632 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:19:18 -0400 (0:00:00.176) 0:01:32.808 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610609.2783134, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610533.3987217, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986653, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "4263604762", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:19:18 -0400 (0:00:00.390) 0:01:33.199 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": 
"Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:19:18 -0400 (0:00:00.039) 0:01:33.239 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:19:18 -0400 (0:00:00.040) 0:01:33.280 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:19:18 -0400 (0:00:00.039) 0:01:33.319 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:19:19 -0400 (0:00:00.039) 0:01:33.359 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:19:19 -0400 (0:00:00.040) 0:01:33.400 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:19:19 -0400 (0:00:00.039) 0:01:33.440 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:19:19 -0400 (0:00:00.040) 0:01:33.480 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Wednesday 02 April 2025 12:19:19 -0400 (0:00:00.039) 0:01:33.520 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_parent_mode": "0755", "__podman_parent_path": "/etc/containers", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:126 Wednesday 
02 April 2025 12:19:19 -0400 (0:00:00.052) 0:01:33.572 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Wednesday 02 April 2025 12:19:19 -0400 (0:00:00.067) 0:01:33.639 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Wednesday 02 April 2025 12:19:19 -0400 (0:00:00.038) 0:01:33.678 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:129 Wednesday 02 April 2025 12:19:19 -0400 (0:00:00.046) 0:01:33.724 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Wednesday 02 April 2025 12:19:19 -0400 (0:00:00.122) 0:01:33.846 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Wednesday 02 April 2025 12:19:19 -0400 (0:00:00.036) 0:01:33.883 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:132 Wednesday 02 April 2025 12:19:19 -0400 (0:00:00.038) 0:01:33.922 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:7 Wednesday 02 April 2025 12:19:19 -0400 (0:00:00.069) 0:01:33.992 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:15 Wednesday 02 April 2025 12:19:19 -0400 (0:00:00.038) 0:01:34.030 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:135 Wednesday 02 April 2025 12:19:19 -0400 (0:00:00.036) 0:01:34.067 ******* 
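
Note: the containers.conf.d, registries.conf.d, and storage.conf handling above, and the policy.json handling just below, are skipped in this run because the test requests no host-level container configuration; when such settings are given, the role writes drop-ins at the paths resolved earlier (for example /etc/containers/registries.conf.d/50-systemroles.conf). A representative registries drop-in, shown purely as an illustration and not output from this run, is:

    cat > /etc/containers/registries.conf.d/50-systemroles.conf <<'EOF'
    unqualified-search-registries = ["registry.access.redhat.com", "quay.io"]
    EOF
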
included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:8 Wednesday 02 April 2025 12:19:19 -0400 (0:00:00.073) 0:01:34.141 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:16 Wednesday 02 April 2025 12:19:19 -0400 (0:00:00.036) 0:01:34.177 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:21 Wednesday 02 April 2025 12:19:19 -0400 (0:00:00.038) 0:01:34.216 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:27 Wednesday 02 April 2025 12:19:19 -0400 (0:00:00.036) 0:01:34.252 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:141 Wednesday 02 April 2025 12:19:19 -0400 (0:00:00.037) 0:01:34.290 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:148 Wednesday 02 April 2025 12:19:19 -0400 (0:00:00.038) 0:01:34.329 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:155 Wednesday 02 April 2025 12:19:20 -0400 (0:00:00.037) 0:01:34.366 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:159 Wednesday 02 April 2025 12:19:20 -0400 (0:00:00.038) 0:01:34.404 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:168 Wednesday 02 April 2025 12:19:20 -0400 (0:00:00.035) 0:01:34.440 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this 
result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:177 Wednesday 02 April 2025 12:19:20 -0400 (0:00:00.036) 0:01:34.476 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node1 included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Wednesday 02 April 2025 12:19:20 -0400 (0:00:00.194) 0:01:34.670 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_user": "user_quadlet_basic" }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Wednesday 02 April 2025 12:19:20 -0400 (0:00:00.070) 0:01:34.741 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:19:20 -0400 (0:00:00.117) 0:01:34.858 ******* ok: [managed-node1] => { "ansible_facts": { "getent_passwd": { "user_quadlet_basic": [ "x", "1111", "1111", "", "/home/user_quadlet_basic", "/bin/bash" ] } }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:19:20 -0400 (0:00:00.449) 0:01:35.307 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:19:21 -0400 (0:00:00.071) 0:01:35.379 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_group": "1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:19:21 -0400 (0:00:00.081) 0:01:35.461 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:19:21 -0400 (0:00:00.058) 0:01:35.520 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:19:21 -0400 (0:00:00.061) 0:01:35.581 
******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:19:21 -0400 (0:00:00.060) 0:01:35.642 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:19:21 -0400 (0:00:00.062) 0:01:35.704 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:19:21 -0400 (0:00:00.059) 0:01:35.764 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:19:21 -0400 (0:00:00.065) 0:01:35.829 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:19:21 -0400 (0:00:00.070) 0:01:35.900 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:19:21 -0400 (0:00:00.059) 0:01:35.959 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14 Wednesday 02 April 2025 12:19:21 -0400 (0:00:00.063) 0:01:36.023 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_rootless": true, "__podman_xdg_runtime_dir": "/run/user/1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20 Wednesday 02 April 2025 12:19:21 -0400 (0:00:00.077) 0:01:36.101 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Wednesday 02 April 2025 12:19:21 -0400 (0:00:00.147) 0:01:36.249 ******* changed: [managed-node1] => { "changed": true, "cmd": [ "loginctl", "enable-linger", "user_quadlet_basic" ], "delta": 
"0:00:00.019459", "end": "2025-04-02 12:19:22.242324", "rc": 0, "start": "2025-04-02 12:19:22.222865" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Wednesday 02 April 2025 12:19:22 -0400 (0:00:00.435) 0:01:36.684 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Wednesday 02 April 2025 12:19:22 -0400 (0:00:00.059) 0:01:36.743 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25 Wednesday 02 April 2025 12:19:22 -0400 (0:00:00.050) 0:01:36.794 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610762.2496548, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1743610762.3036547, "dev": 36, "device_type": 0, "executable": true, "exists": true, "gid": 1111, "gr_name": "user_quadlet_basic", "inode": 28283, "isblk": false, "ischr": false, "isdir": true, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/directory", "mode": "0700", "mtime": 1743610762.3036547, "nlink": 3, "path": "/run/user/1111", "pw_name": "user_quadlet_basic", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 80, "uid": 1111, "version": null, "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": true } } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41 Wednesday 02 April 2025 12:19:22 -0400 (0:00:00.388) 0:01:37.183 ******* [WARNING]: Using a variable for a task's 'args' is unsafe in some situations (see https://docs.ansible.com/ansible/devel/reference_appendices/faq.html#argsplat- unsafe) changed: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Wednesday 02 April 2025 12:19:24 -0400 (0:00:01.165) 0:01:38.349 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_user": "user_quadlet_basic" }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Wednesday 02 April 2025 12:19:24 -0400 (0:00:00.067) 0:01:38.417 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 
02 April 2025 12:19:24 -0400 (0:00:00.107) 0:01:38.524 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:19:24 -0400 (0:00:00.071) 0:01:38.596 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:19:24 -0400 (0:00:00.208) 0:01:38.805 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_group": "1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:19:24 -0400 (0:00:00.059) 0:01:38.864 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:19:24 -0400 (0:00:00.047) 0:01:38.912 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:19:24 -0400 (0:00:00.056) 0:01:38.969 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:19:24 -0400 (0:00:00.037) 0:01:39.006 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:19:24 -0400 (0:00:00.038) 0:01:39.045 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:19:24 -0400 (0:00:00.036) 0:01:39.082 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:19:24 -0400 (0:00:00.043) 0:01:39.125 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman 
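
Note: the "Manage each secret" tasks in this play (one changed result above, one further below) create podman secrets for the rootless user_quadlet_basic account; the secret names and values are vault-encrypted and hidden by no_log. A roughly equivalent manual operation, with a placeholder name and value rather than the test's real data, would be run inside that user's session:

    export XDG_RUNTIME_DIR=/run/user/1111        # matches the fact set above
    printf '%s' 'example-password' | podman secret create example_secret -
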
: Fail if user not in subuid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:19:24 -0400 (0:00:00.062) 0:01:39.188 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:19:24 -0400 (0:00:00.063) 0:01:39.251 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14 Wednesday 02 April 2025 12:19:24 -0400 (0:00:00.062) 0:01:39.314 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_rootless": true, "__podman_xdg_runtime_dir": "/run/user/1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20 Wednesday 02 April 2025 12:19:25 -0400 (0:00:00.072) 0:01:39.386 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Wednesday 02 April 2025 12:19:25 -0400 (0:00:00.109) 0:01:39.495 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "loginctl", "enable-linger", "user_quadlet_basic" ], "rc": 0 } STDOUT: skipped, since /var/lib/systemd/linger/user_quadlet_basic exists TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Wednesday 02 April 2025 12:19:25 -0400 (0:00:00.423) 0:01:39.919 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Wednesday 02 April 2025 12:19:25 -0400 (0:00:00.055) 0:01:39.974 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25 Wednesday 02 April 2025 12:19:25 -0400 (0:00:00.062) 0:01:40.037 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610762.2496548, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1743610763.796654, "dev": 36, "device_type": 0, "executable": true, "exists": true, "gid": 1111, "gr_name": "user_quadlet_basic", "inode": 28283, "isblk": false, "ischr": false, "isdir": true, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/directory", "mode": "0700", "mtime": 1743610763.796654, 
"nlink": 6, "path": "/run/user/1111", "pw_name": "user_quadlet_basic", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 140, "uid": 1111, "version": null, "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": true } } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41 Wednesday 02 April 2025 12:19:26 -0400 (0:00:00.440) 0:01:40.478 ******* [WARNING]: Using a variable for a task's 'args' is unsafe in some situations (see https://docs.ansible.com/ansible/devel/reference_appendices/faq.html#argsplat- unsafe) changed: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:184 Wednesday 02 April 2025 12:19:26 -0400 (0:00:00.610) 0:01:41.089 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:191 Wednesday 02 April 2025 12:19:26 -0400 (0:00:00.058) 0:01:41.147 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Wednesday 02 April 2025 12:19:26 -0400 (0:00:00.172) 0:01:41.320 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Network]\nSubnet=192.168.29.0/24\nGateway=192.168.29.1\nLabel=app=wordpress\nNetworkName=quadlet-basic-name\n", "__podman_quadlet_template_src": "templates/quadlet-basic.network.j2" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Wednesday 02 April 2025 12:19:27 -0400 (0:00:00.112) 0:01:41.432 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_basic" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Wednesday 02 April 2025 12:19:27 -0400 (0:00:00.046) 0:01:41.479 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Wednesday 02 April 2025 12:19:27 -0400 (0:00:00.041) 0:01:41.521 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-basic", "__podman_quadlet_type": "network", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Wednesday 02 April 2025 12:19:27 -0400 (0:00:00.052) 0:01:41.574 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:19:27 -0400 (0:00:00.068) 0:01:41.642 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:19:27 -0400 (0:00:00.044) 0:01:41.687 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:19:27 -0400 (0:00:00.044) 0:01:41.732 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_group": "1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:19:27 -0400 (0:00:00.050) 0:01:41.783 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610609.2783134, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610533.3987217, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986653, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "4263604762", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:19:27 -0400 (0:00:00.389) 0:01:42.173 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_basic" ], "delta": "0:00:00.003397", "end": "2025-04-02 12:19:28.149242", "rc": 0, "start": "2025-04-02 12:19:28.145845" } STDOUT: 0: user_quadlet_basic 100000 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:19:28 -0400 (0:00:00.387) 0:01:42.561 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_basic" ], "delta": "0:00:00.003588", "end": "2025-04-02 12:19:28.536405", "rc": 0, "start": "2025-04-02 12:19:28.532817" } STDOUT: 0: user_quadlet_basic 100000 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:19:28 -0400 (0:00:00.393) 0:01:42.954 ******* ok: [managed-node1] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_basic": { "range": 65536, "start": 100000 } }, "podman_subuid_info": { "user_quadlet_basic": { "range": 65536, "start": 100000 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:19:28 -0400 (0:00:00.062) 0:01:43.016 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:19:28 -0400 (0:00:00.060) 0:01:43.076 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:19:28 -0400 (0:00:00.044) 0:01:43.121 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:19:28 -0400 (0:00:00.039) 0:01:43.161 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:19:28 -0400 (0:00:00.039) 0:01:43.201 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Wednesday 02 
April 2025 12:19:28 -0400 (0:00:00.039) 0:01:43.240 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-basic-network.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_basic", "__podman_xdg_runtime_dir": "/run/user/1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Wednesday 02 April 2025 12:19:28 -0400 (0:00:00.081) 0:01:43.321 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_basic/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Wednesday 02 April 2025 12:19:29 -0400 (0:00:00.061) 0:01:43.382 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Wednesday 02 April 2025 12:19:29 -0400 (0:00:00.048) 0:01:43.431 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic.network", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Wednesday 02 April 2025 12:19:29 -0400 (0:00:00.114) 0:01:43.545 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Wednesday 02 April 2025 12:19:29 -0400 (0:00:00.051) 0:01:43.597 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Wednesday 02 April 2025 12:19:29 -0400 (0:00:00.094) 0:01:43.692 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Wednesday 02 April 2025 12:19:29 -0400 (0:00:00.086) 0:01:43.778 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 
Wednesday 02 April 2025 12:19:29 -0400 (0:00:00.072) 0:01:43.851 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "loginctl", "enable-linger", "user_quadlet_basic" ], "rc": 0 } STDOUT: skipped, since /var/lib/systemd/linger/user_quadlet_basic exists TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Wednesday 02 April 2025 12:19:29 -0400 (0:00:00.390) 0:01:44.242 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Wednesday 02 April 2025 12:19:29 -0400 (0:00:00.054) 0:01:44.297 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Wednesday 02 April 2025 12:19:30 -0400 (0:00:00.052) 0:01:44.350 ******* TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Wednesday 02 April 2025 12:19:30 -0400 (0:00:00.042) 0:01:44.392 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Wednesday 02 April 2025 12:19:30 -0400 (0:00:00.037) 0:01:44.430 ******* changed: [managed-node1] => { "changed": true, "gid": 1111, "group": "user_quadlet_basic", "mode": "0755", "owner": "user_quadlet_basic", "path": "/home/user_quadlet_basic/.config/containers/systemd", "secontext": "unconfined_u:object_r:config_home_t:s0", "size": 6, "state": "directory", "uid": 1111 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Wednesday 02 April 2025 12:19:30 -0400 (0:00:00.418) 0:01:44.849 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Wednesday 02 April 2025 12:19:30 -0400 (0:00:00.057) 0:01:44.906 ******* changed: [managed-node1] => { "changed": true, "checksum": "19c9b17be2af9b9deca5c3bd327f048966750682", "dest": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic.network", "gid": 1111, "group": "user_quadlet_basic", "md5sum": "313e9a2e5a99f80fa7023c19a1065658", "mode": "0644", "owner": "user_quadlet_basic", "secontext": "unconfined_u:object_r:config_home_t:s0", "size": 105, "src": "/root/.ansible/tmp/ansible-tmp-1743610770.6321862-15801-233949183344885/source", "state": "file", "uid": 1111 } TASK 
[fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Wednesday 02 April 2025 12:19:31 -0400 (0:00:00.768) 0:01:45.674 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Wednesday 02 April 2025 12:19:31 -0400 (0:00:00.104) 0:01:45.779 ******* ok: [managed-node1] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Wednesday 02 April 2025 12:19:32 -0400 (0:00:00.571) 0:01:46.351 ******* changed: [managed-node1] => { "changed": true, "name": "quadlet-basic-network.service", "state": "started", "status": { "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "basic.target run-user-1111.mount -.slice -.mount", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "quadlet-basic-network.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet=192.168.29.0/24 --gateway=192.168.29.1 --label app=wordpress quadlet-basic-name ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/run/user/1111/systemd/generator/quadlet-basic-network.service", "FreezerState": 
"running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "quadlet-basic-network.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "inherit", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "MemoryAccounting": "yes", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "quadlet-basic-network.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "-.slice basic.target", "RequiresMountsFor": "/run/user/1111/containers /home/user_quadlet_basic", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "-.slice", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", 
"StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-basic-network", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22405", "TimeoutStartUSec": "infinity", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "generated", "UtmpMode": "init", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0", "WorkingDirectory": "!/home/user_quadlet_basic" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Wednesday 02 April 2025 12:19:32 -0400 (0:00:00.641) 0:01:46.992 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Wednesday 02 April 2025 12:19:32 -0400 (0:00:00.068) 0:01:47.061 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Network": {} }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Wednesday 02 April 2025 12:19:32 -0400 (0:00:00.083) 0:01:47.145 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_basic" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Wednesday 02 April 2025 12:19:32 -0400 (0:00:00.074) 0:01:47.219 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Wednesday 02 April 2025 12:19:32 -0400 (0:00:00.056) 0:01:47.276 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-basic-unused-network", "__podman_quadlet_type": "network", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Wednesday 02 April 2025 12:19:33 -0400 (0:00:00.085) 0:01:47.362 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:19:33 -0400 (0:00:00.083) 0:01:47.446 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:19:33 -0400 (0:00:00.055) 0:01:47.501 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:19:33 -0400 (0:00:00.050) 0:01:47.552 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_group": "1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:19:33 -0400 (0:00:00.052) 0:01:47.604 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610609.2783134, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610533.3987217, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986653, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "4263604762", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:19:33 -0400 (0:00:00.415) 0:01:48.019 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_basic" ], "delta": "0:00:00.003383", "end": "2025-04-02 12:19:34.101110", "rc": 0, "start": "2025-04-02 12:19:34.097727" } STDOUT: 0: user_quadlet_basic 100000 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:19:34 -0400 (0:00:00.497) 0:01:48.517 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_basic" ], "delta": "0:00:00.003651", "end": "2025-04-02 12:19:34.490278", "rc": 0, "start": "2025-04-02 12:19:34.486627" } STDOUT: 0: user_quadlet_basic 100000 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:19:34 -0400 (0:00:00.387) 0:01:48.904 ******* ok: [managed-node1] => { "ansible_facts": { 
"podman_subgid_info": { "user_quadlet_basic": { "range": 65536, "start": 100000 } }, "podman_subuid_info": { "user_quadlet_basic": { "range": 65536, "start": 100000 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:19:34 -0400 (0:00:00.069) 0:01:48.974 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:19:34 -0400 (0:00:00.038) 0:01:49.013 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:19:34 -0400 (0:00:00.040) 0:01:49.054 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:19:34 -0400 (0:00:00.039) 0:01:49.093 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:19:34 -0400 (0:00:00.040) 0:01:49.133 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Wednesday 02 April 2025 12:19:34 -0400 (0:00:00.053) 0:01:49.186 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-basic-unused-network-network.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_basic", "__podman_xdg_runtime_dir": "/run/user/1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Wednesday 02 April 2025 12:19:34 -0400 (0:00:00.109) 0:01:49.296 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_basic/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Wednesday 02 April 2025 12:19:35 -0400 (0:00:00.053) 0:01:49.350 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set 
per-container variables part 5] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Wednesday 02 April 2025 12:19:35 -0400 (0:00:00.046) 0:01:49.396 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-network.network", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Wednesday 02 April 2025 12:19:35 -0400 (0:00:00.106) 0:01:49.503 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Wednesday 02 April 2025 12:19:35 -0400 (0:00:00.044) 0:01:49.547 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Wednesday 02 April 2025 12:19:35 -0400 (0:00:00.037) 0:01:49.585 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Wednesday 02 April 2025 12:19:35 -0400 (0:00:00.079) 0:01:49.664 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Wednesday 02 April 2025 12:19:35 -0400 (0:00:00.154) 0:01:49.818 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "loginctl", "enable-linger", "user_quadlet_basic" ], "rc": 0 } STDOUT: skipped, since /var/lib/systemd/linger/user_quadlet_basic exists TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Wednesday 02 April 2025 12:19:35 -0400 (0:00:00.376) 0:01:50.195 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Wednesday 02 April 2025 12:19:35 -0400 (0:00:00.052) 0:01:50.247 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Wednesday 02 April 
2025 12:19:35 -0400 (0:00:00.047) 0:01:50.295 ******* TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Wednesday 02 April 2025 12:19:36 -0400 (0:00:00.079) 0:01:50.375 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Wednesday 02 April 2025 12:19:36 -0400 (0:00:00.046) 0:01:50.422 ******* ok: [managed-node1] => { "changed": false, "gid": 1111, "group": "user_quadlet_basic", "mode": "0755", "owner": "user_quadlet_basic", "path": "/home/user_quadlet_basic/.config/containers/systemd", "secontext": "unconfined_u:object_r:config_home_t:s0", "size": 35, "state": "directory", "uid": 1111 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Wednesday 02 April 2025 12:19:36 -0400 (0:00:00.406) 0:01:50.828 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Wednesday 02 April 2025 12:19:36 -0400 (0:00:00.047) 0:01:50.876 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Wednesday 02 April 2025 12:19:36 -0400 (0:00:00.045) 0:01:50.922 ******* changed: [managed-node1] => { "changed": true, "checksum": "52c9d75ecaf81203cc1f1a3b1dd00fcd25067b01", "dest": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-network.network", "gid": 1111, "group": "user_quadlet_basic", "md5sum": "968d495367b59475979615e4884cbda2", "mode": "0644", "owner": "user_quadlet_basic", "secontext": "unconfined_u:object_r:config_home_t:s0", "size": 54, "src": "/root/.ansible/tmp/ansible-tmp-1743610776.6376607-16091-44256089335709/source", "state": "file", "uid": 1111 } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Wednesday 02 April 2025 12:19:37 -0400 (0:00:00.722) 0:01:51.645 ******* ok: [managed-node1] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Wednesday 02 April 2025 12:19:37 -0400 (0:00:00.581) 0:01:52.227 ******* changed: [managed-node1] => { "changed": true, "name": "quadlet-basic-unused-network-network.service", "state": "started", "status": { "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", 
"After": "basic.target -.mount -.slice run-user-1111.mount", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "quadlet-basic-unused-network-network.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore systemd-quadlet-basic-unused-network ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/run/user/1111/systemd/generator/quadlet-basic-unused-network-network.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "quadlet-basic-unused-network-network.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "inherit", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", 
"LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "MemoryAccounting": "yes", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "quadlet-basic-unused-network-network.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "-.slice basic.target", "RequiresMountsFor": "/run/user/1111/containers /home/user_quadlet_basic", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "-.slice", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-basic-unused-network-network", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22405", "TimeoutStartUSec": "infinity", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "generated", "UtmpMode": "init", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0", "WorkingDirectory": "!/home/user_quadlet_basic" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Wednesday 02 April 2025 12:19:38 -0400 (0:00:00.667) 0:01:52.894 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Wednesday 02 April 2025 12:19:38 -0400 (0:00:00.063) 0:01:52.958 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Volume": { "VolumeName": "quadlet-basic-mysql-name" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Wednesday 02 April 2025 12:19:38 -0400 (0:00:00.069) 0:01:53.027 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_basic" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Wednesday 02 April 2025 12:19:38 -0400 (0:00:00.071) 0:01:53.099 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Wednesday 02 April 2025 12:19:38 -0400 (0:00:00.055) 0:01:53.154 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-basic-mysql", "__podman_quadlet_type": "volume", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Wednesday 02 April 2025 12:19:38 -0400 (0:00:00.086) 0:01:53.241 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:19:39 -0400 (0:00:00.111) 0:01:53.352 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:19:39 -0400 (0:00:00.083) 0:01:53.436 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:19:39 -0400 (0:00:00.053) 0:01:53.489 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_group": "1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:19:39 -0400 
(0:00:00.063) 0:01:53.553 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610609.2783134, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610533.3987217, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986653, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "4263604762", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:19:39 -0400 (0:00:00.403) 0:01:53.957 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_basic" ], "delta": "0:00:00.003911", "end": "2025-04-02 12:19:39.969986", "rc": 0, "start": "2025-04-02 12:19:39.966075" } STDOUT: 0: user_quadlet_basic 100000 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:19:40 -0400 (0:00:00.538) 0:01:54.495 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_basic" ], "delta": "0:00:00.004099", "end": "2025-04-02 12:19:40.520161", "rc": 0, "start": "2025-04-02 12:19:40.516062" } STDOUT: 0: user_quadlet_basic 100000 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:19:40 -0400 (0:00:00.438) 0:01:54.934 ******* ok: [managed-node1] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_basic": { "range": 65536, "start": 100000 } }, "podman_subuid_info": { "user_quadlet_basic": { "range": 65536, "start": 100000 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:19:40 -0400 (0:00:00.066) 0:01:55.001 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:19:40 -0400 (0:00:00.040) 0:01:55.041 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:19:40 -0400 (0:00:00.040) 0:01:55.082 ******* skipping: [managed-node1] => { "changed": false, 
"skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:19:40 -0400 (0:00:00.039) 0:01:55.121 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:19:40 -0400 (0:00:00.046) 0:01:55.167 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Wednesday 02 April 2025 12:19:40 -0400 (0:00:00.056) 0:01:55.224 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-basic-mysql-volume.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_basic", "__podman_xdg_runtime_dir": "/run/user/1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Wednesday 02 April 2025 12:19:40 -0400 (0:00:00.103) 0:01:55.327 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_basic/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Wednesday 02 April 2025 12:19:41 -0400 (0:00:00.066) 0:01:55.393 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Wednesday 02 April 2025 12:19:41 -0400 (0:00:00.059) 0:01:55.453 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.volume", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Wednesday 02 April 2025 12:19:41 -0400 (0:00:00.158) 0:01:55.611 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Wednesday 02 April 2025 12:19:41 -0400 (0:00:00.067) 0:01:55.679 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Wednesday 02 April 2025 12:19:41 -0400 (0:00:00.060) 0:01:55.740 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Wednesday 02 April 2025 12:19:41 -0400 (0:00:00.122) 0:01:55.862 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Wednesday 02 April 2025 12:19:41 -0400 (0:00:00.179) 0:01:56.042 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "loginctl", "enable-linger", "user_quadlet_basic" ], "rc": 0 } STDOUT: skipped, since /var/lib/systemd/linger/user_quadlet_basic exists TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Wednesday 02 April 2025 12:19:42 -0400 (0:00:00.410) 0:01:56.452 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Wednesday 02 April 2025 12:19:42 -0400 (0:00:00.061) 0:01:56.514 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Wednesday 02 April 2025 12:19:42 -0400 (0:00:00.047) 0:01:56.562 ******* TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Wednesday 02 April 2025 12:19:42 -0400 (0:00:00.055) 0:01:56.618 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Wednesday 02 April 2025 12:19:42 -0400 (0:00:00.043) 0:01:56.661 ******* ok: [managed-node1] => { "changed": false, "gid": 1111, "group": "user_quadlet_basic", "mode": "0755", "owner": "user_quadlet_basic", "path": "/home/user_quadlet_basic/.config/containers/systemd", "secontext": "unconfined_u:object_r:config_home_t:s0", "size": 79, "state": "directory", "uid": 1111 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Wednesday 02 April 2025 12:19:42 -0400 (0:00:00.396) 0:01:57.058 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Wednesday 02 April 2025 12:19:42 -0400 (0:00:00.043) 0:01:57.101 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Wednesday 02 April 2025 12:19:42 -0400 (0:00:00.038) 0:01:57.140 ******* changed: [managed-node1] => { "changed": true, "checksum": "90a3571bfc7670328fe3f8fb625585613dbd9c4a", "dest": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.volume", "gid": 1111, "group": "user_quadlet_basic", "md5sum": "8682d71bf3c086f228cd72389b7c9018", "mode": "0644", "owner": "user_quadlet_basic", "secontext": "unconfined_u:object_r:config_home_t:s0", "size": 89, "src": "/root/.ansible/tmp/ansible-tmp-1743610782.852917-16386-47120096876272/source", "state": "file", "uid": 1111 } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Wednesday 02 April 2025 12:19:43 -0400 (0:00:00.707) 0:01:57.848 ******* ok: [managed-node1] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Wednesday 02 April 2025 12:19:44 -0400 (0:00:00.531) 0:01:58.379 ******* changed: [managed-node1] => { "changed": true, "name": "quadlet-basic-mysql-volume.service", "state": "started", "status": { "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "basic.target run-user-1111.mount -.slice -.mount", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control 
cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "quadlet-basic-mysql-volume.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore quadlet-basic-mysql-name ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/run/user/1111/systemd/generator/quadlet-basic-mysql-volume.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "quadlet-basic-mysql-volume.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "inherit", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "MemoryAccounting": "yes", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "quadlet-basic-mysql-volume.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", 
"ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "basic.target -.slice", "RequiresMountsFor": "/run/user/1111/containers /home/user_quadlet_basic", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "-.slice", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-basic-mysql-volume", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22405", "TimeoutStartUSec": "infinity", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "generated", "UtmpMode": "init", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0", "WorkingDirectory": "!/home/user_quadlet_basic" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Wednesday 02 April 2025 12:19:44 -0400 (0:00:00.617) 0:01:58.997 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Wednesday 02 April 2025 12:19:44 -0400 (0:00:00.063) 0:01:59.061 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Volume": {} }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Wednesday 02 April 2025 12:19:44 -0400 (0:00:00.062) 0:01:59.123 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_basic" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Wednesday 02 April 2025 12:19:44 -0400 (0:00:00.059) 0:01:59.183 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": 
"Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Wednesday 02 April 2025 12:19:44 -0400 (0:00:00.046) 0:01:59.230 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-basic-unused-volume", "__podman_quadlet_type": "volume", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Wednesday 02 April 2025 12:19:44 -0400 (0:00:00.058) 0:01:59.288 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:19:45 -0400 (0:00:00.067) 0:01:59.355 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:19:45 -0400 (0:00:00.044) 0:01:59.400 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:19:45 -0400 (0:00:00.052) 0:01:59.452 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_group": "1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:19:45 -0400 (0:00:00.060) 0:01:59.513 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610609.2783134, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610533.3987217, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986653, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "4263604762", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:19:45 -0400 (0:00:00.396) 0:01:59.909 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_basic" ], "delta": 
"0:00:00.003360", "end": "2025-04-02 12:19:45.883614", "rc": 0, "start": "2025-04-02 12:19:45.880254" } STDOUT: 0: user_quadlet_basic 100000 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:19:45 -0400 (0:00:00.398) 0:02:00.307 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_basic" ], "delta": "0:00:00.003421", "end": "2025-04-02 12:19:46.319580", "rc": 0, "start": "2025-04-02 12:19:46.316159" } STDOUT: 0: user_quadlet_basic 100000 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:19:46 -0400 (0:00:00.515) 0:02:00.822 ******* ok: [managed-node1] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_basic": { "range": 65536, "start": 100000 } }, "podman_subuid_info": { "user_quadlet_basic": { "range": 65536, "start": 100000 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:19:46 -0400 (0:00:00.087) 0:02:00.910 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:19:46 -0400 (0:00:00.050) 0:02:00.960 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:19:46 -0400 (0:00:00.051) 0:02:01.012 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:19:46 -0400 (0:00:00.045) 0:02:01.057 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:19:46 -0400 (0:00:00.049) 0:02:01.107 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Wednesday 02 April 2025 12:19:46 -0400 (0:00:00.038) 0:02:01.146 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-basic-unused-volume-volume.service", 
"__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_basic", "__podman_xdg_runtime_dir": "/run/user/1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Wednesday 02 April 2025 12:19:46 -0400 (0:00:00.069) 0:02:01.215 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_basic/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Wednesday 02 April 2025 12:19:46 -0400 (0:00:00.041) 0:02:01.257 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Wednesday 02 April 2025 12:19:46 -0400 (0:00:00.038) 0:02:01.295 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-volume.volume", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Wednesday 02 April 2025 12:19:47 -0400 (0:00:00.106) 0:02:01.402 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Wednesday 02 April 2025 12:19:47 -0400 (0:00:00.054) 0:02:01.457 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Wednesday 02 April 2025 12:19:47 -0400 (0:00:00.045) 0:02:01.502 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Wednesday 02 April 2025 12:19:47 -0400 (0:00:00.093) 0:02:01.595 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Wednesday 02 April 2025 12:19:47 -0400 (0:00:00.061) 0:02:01.657 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "loginctl", "enable-linger", "user_quadlet_basic" ], "rc": 0 } STDOUT: skipped, since 
/var/lib/systemd/linger/user_quadlet_basic exists TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Wednesday 02 April 2025 12:19:47 -0400 (0:00:00.444) 0:02:02.102 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Wednesday 02 April 2025 12:19:47 -0400 (0:00:00.046) 0:02:02.148 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Wednesday 02 April 2025 12:19:47 -0400 (0:00:00.045) 0:02:02.194 ******* TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Wednesday 02 April 2025 12:19:47 -0400 (0:00:00.053) 0:02:02.247 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Wednesday 02 April 2025 12:19:47 -0400 (0:00:00.057) 0:02:02.305 ******* ok: [managed-node1] => { "changed": false, "gid": 1111, "group": "user_quadlet_basic", "mode": "0755", "owner": "user_quadlet_basic", "path": "/home/user_quadlet_basic/.config/containers/systemd", "secontext": "unconfined_u:object_r:config_home_t:s0", "size": 113, "state": "directory", "uid": 1111 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Wednesday 02 April 2025 12:19:48 -0400 (0:00:00.427) 0:02:02.732 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Wednesday 02 April 2025 12:19:48 -0400 (0:00:00.045) 0:02:02.777 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Wednesday 02 April 2025 12:19:48 -0400 (0:00:00.048) 0:02:02.826 ******* changed: [managed-node1] => { "changed": true, "checksum": "fd0ae560360afa5541b866560b1e849d25e216ef", "dest": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-volume.volume", "gid": 1111, "group": "user_quadlet_basic", "md5sum": "4967598a0284ad3e296ab106829a30a2", "mode": "0644", "owner": "user_quadlet_basic", "secontext": 
"unconfined_u:object_r:config_home_t:s0", "size": 53, "src": "/root/.ansible/tmp/ansible-tmp-1743610788.5443847-16647-92259202002266/source", "state": "file", "uid": 1111 } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Wednesday 02 April 2025 12:19:49 -0400 (0:00:00.728) 0:02:03.554 ******* ok: [managed-node1] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Wednesday 02 April 2025 12:19:49 -0400 (0:00:00.567) 0:02:04.121 ******* changed: [managed-node1] => { "changed": true, "name": "quadlet-basic-unused-volume-volume.service", "state": "started", "status": { "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "-.mount run-user-1111.mount basic.target -.slice", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "quadlet-basic-unused-volume-volume.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-basic-unused-volume ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/run/user/1111/systemd/generator/quadlet-basic-unused-volume-volume.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", 
"IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "quadlet-basic-unused-volume-volume.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "inherit", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "MemoryAccounting": "yes", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "quadlet-basic-unused-volume-volume.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "-.slice basic.target", "RequiresMountsFor": "/run/user/1111/containers /home/user_quadlet_basic", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "-.slice", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-basic-unused-volume-volume", "SyslogLevel": "6", "SyslogLevelPrefix": 
"yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22405", "TimeoutStartUSec": "infinity", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "generated", "UtmpMode": "init", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0", "WorkingDirectory": "!/home/user_quadlet_basic" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Wednesday 02 April 2025 12:19:50 -0400 (0:00:00.633) 0:02:04.755 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Wednesday 02 April 2025 12:19:50 -0400 (0:00:00.064) 0:02:04.819 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Container": { "ContainerName": "quadlet-basic-mysql-name", "Environment": [ "FOO=/bin/busybox-extras", "BAZ=test" ], "Image": "quay.io/linux-system-roles/mysql:5.6", "Network": "quadlet-basic.network", "PodmanArgs": "--secret=mysql_container_root_password,type=env,target=MYSQL_ROOT_PASSWORD --secret=json_secret,type=mount,target=/tmp/test.json", "Volume": "quadlet-basic-mysql.volume:/var/lib/mysql" }, "Install": { "WantedBy": "default.target" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Wednesday 02 April 2025 12:19:50 -0400 (0:00:00.076) 0:02:04.895 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_basic" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Wednesday 02 April 2025 12:19:50 -0400 (0:00:00.073) 0:02:04.969 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Wednesday 02 April 2025 12:19:50 -0400 (0:00:00.061) 0:02:05.030 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-basic-mysql", "__podman_quadlet_type": "container", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Wednesday 02 April 2025 12:19:50 -0400 (0:00:00.083) 0:02:05.113 ******* included: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:19:50 -0400 (0:00:00.108) 0:02:05.222 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:19:50 -0400 (0:00:00.071) 0:02:05.294 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:19:51 -0400 (0:00:00.068) 0:02:05.363 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_group": "1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:19:51 -0400 (0:00:00.081) 0:02:05.444 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610609.2783134, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610533.3987217, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986653, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "4263604762", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:19:51 -0400 (0:00:00.435) 0:02:05.880 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_basic" ], "delta": "0:00:00.003972", "end": "2025-04-02 12:19:51.850710", "rc": 0, "start": "2025-04-02 12:19:51.846738" } STDOUT: 0: user_quadlet_basic 100000 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:19:51 -0400 (0:00:00.376) 0:02:06.256 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_basic" ], "delta": "0:00:00.003479", "end": "2025-04-02 12:19:52.230179", "rc": 0, "start": "2025-04-02 12:19:52.226700" } STDOUT: 0: user_quadlet_basic 100000 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:19:52 -0400 (0:00:00.378) 0:02:06.635 ******* ok: [managed-node1] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_basic": { "range": 65536, "start": 100000 } }, "podman_subuid_info": { "user_quadlet_basic": { "range": 65536, "start": 100000 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:19:52 -0400 (0:00:00.117) 0:02:06.752 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:19:52 -0400 (0:00:00.040) 0:02:06.793 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:19:52 -0400 (0:00:00.050) 0:02:06.843 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:19:52 -0400 (0:00:00.038) 0:02:06.881 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:19:52 -0400 (0:00:00.039) 0:02:06.921 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Wednesday 02 April 2025 12:19:52 -0400 (0:00:00.040) 0:02:06.962 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [ "quay.io/linux-system-roles/mysql:5.6" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-basic-mysql.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_basic", "__podman_xdg_runtime_dir": "/run/user/1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Wednesday 02 April 2025 12:19:52 -0400 (0:00:00.065) 0:02:07.027 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_basic/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Wednesday 02 April 2025 12:19:52 -0400 (0:00:00.042) 0:02:07.070 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Wednesday 02 April 2025 12:19:52 -0400 (0:00:00.053) 0:02:07.124 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_images": [ "quay.io/linux-system-roles/mysql:5.6" ], "__podman_quadlet_file": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.container", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Wednesday 02 April 2025 12:19:52 -0400 (0:00:00.135) 0:02:07.259 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Wednesday 02 April 2025 12:19:52 -0400 (0:00:00.064) 0:02:07.324 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Wednesday 02 April 2025 12:19:53 -0400 (0:00:00.057) 0:02:07.381 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Wednesday 02 April 2025 12:19:53 -0400 (0:00:00.128) 0:02:07.510 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Wednesday 02 April 2025 12:19:53 -0400 (0:00:00.098) 0:02:07.609 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "loginctl", "enable-linger", "user_quadlet_basic" ], "rc": 0 } STDOUT: skipped, since /var/lib/systemd/linger/user_quadlet_basic exists TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Wednesday 02 April 2025 12:19:53 -0400 (0:00:00.394) 0:02:08.004 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Wednesday 02 April 2025 12:19:53 -0400 
(0:00:00.105) 0:02:08.109 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Wednesday 02 April 2025 12:19:53 -0400 (0:00:00.059) 0:02:08.169 ******* TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Wednesday 02 April 2025 12:19:53 -0400 (0:00:00.056) 0:02:08.225 ******* changed: [managed-node1] => (item=None) => { "attempts": 1, "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } changed: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Wednesday 02 April 2025 12:20:00 -0400 (0:00:07.098) 0:02:15.324 ******* ok: [managed-node1] => { "changed": false, "gid": 1111, "group": "user_quadlet_basic", "mode": "0755", "owner": "user_quadlet_basic", "path": "/home/user_quadlet_basic/.config/containers/systemd", "secontext": "unconfined_u:object_r:config_home_t:s0", "size": 155, "state": "directory", "uid": 1111 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Wednesday 02 April 2025 12:20:01 -0400 (0:00:00.386) 0:02:15.711 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Wednesday 02 April 2025 12:20:01 -0400 (0:00:00.038) 0:02:15.749 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Wednesday 02 April 2025 12:20:01 -0400 (0:00:00.042) 0:02:15.792 ******* changed: [managed-node1] => { "changed": true, "checksum": "0b6cac7929623f1059e78ef39b8b0a25169b28a6", "dest": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.container", "gid": 1111, "group": "user_quadlet_basic", "md5sum": "1ede2d50fe62a3ca756acb50f2f6868e", "mode": "0644", "owner": "user_quadlet_basic", "secontext": "unconfined_u:object_r:config_home_t:s0", "size": 448, "src": "/root/.ansible/tmp/ansible-tmp-1743610801.5122783-17111-32695399448437/source", "state": "file", "uid": 1111 } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Wednesday 02 April 2025 12:20:02 -0400 (0:00:00.766) 0:02:16.559 ******* ok: [managed-node1] => { "changed": false, "name": null, 
"status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Wednesday 02 April 2025 12:20:02 -0400 (0:00:00.540) 0:02:17.099 ******* changed: [managed-node1] => { "changed": true, "name": "quadlet-basic-mysql.service", "state": "started", "status": { "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "quadlet-basic-network.service -.mount -.slice quadlet-basic-mysql-volume.service basic.target run-user-1111.mount", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "default.target shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "yes", "DelegateControllers": "cpu cpuacct cpuset io blkio memory devices pids", "Description": "quadlet-basic-mysql.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-basic-mysql.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name=quadlet-basic-mysql-name --cidfile=/run/user/1111/quadlet-basic-mysql.cid --replace --rm --cgroups=split --network=quadlet-basic-name --sdnotify=conmon -d -v quadlet-basic-mysql-name:/var/lib/mysql --env BAZ=test --env FOO=/bin/busybox-extras --secret=mysql_container_root_password,type=env,target=MYSQL_ROOT_PASSWORD --secret=json_secret,type=mount,target=/tmp/test.json quay.io/linux-system-roles/mysql:5.6 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/1111/quadlet-basic-mysql.cid ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/1111/quadlet-basic-mysql.cid ; 
ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/run/user/1111/systemd/generator/quadlet-basic-mysql.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "quadlet-basic-mysql.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "inherit", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "MemoryAccounting": "yes", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "quadlet-basic-mysql.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "-.slice quadlet-basic-mysql-volume.service basic.target quadlet-basic-network.service", "RequiresMountsFor": "/run/user/1111/containers /home/user_quadlet_basic", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "-.slice", "SourcePath": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.container", "StandardError": 
"inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-basic-mysql", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22405", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "generated", "UtmpMode": "init", "WantedBy": "default.target", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0", "WorkingDirectory": "!/home/user_quadlet_basic" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Wednesday 02 April 2025 12:20:03 -0400 (0:00:01.087) 0:02:18.186 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:198 Wednesday 02 April 2025 12:20:03 -0400 (0:00:00.041) 0:02:18.228 ******* TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:204 Wednesday 02 April 2025 12:20:03 -0400 (0:00:00.034) 0:02:18.262 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:213 Wednesday 02 April 2025 12:20:03 -0400 (0:00:00.040) 0:02:18.302 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Check files] ************************************************************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:201 Wednesday 02 April 2025 12:20:04 -0400 (0:00:00.047) 0:02:18.350 ******* ok: [managed-node1] => (item=quadlet-basic-mysql.container) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "cat", "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.container" ], "delta": "0:00:00.002542", "end": "2025-04-02 12:20:04.366344", "item": "quadlet-basic-mysql.container", "rc": 0, "start": "2025-04-02 12:20:04.363802" } STDOUT: # # Ansible managed # # system_role:podman [Install] WantedBy=default.target [Container] Image=quay.io/linux-system-roles/mysql:5.6 ContainerName=quadlet-basic-mysql-name Volume=quadlet-basic-mysql.volume:/var/lib/mysql Network=quadlet-basic.network 
PodmanArgs=--secret=mysql_container_root_password,type=env,target=MYSQL_ROOT_PASSWORD --secret=json_secret,type=mount,target=/tmp/test.json Environment=FOO=/bin/busybox-extras Environment=BAZ=test ok: [managed-node1] => (item=quadlet-basic.network) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "cat", "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic.network" ], "delta": "0:00:00.002645", "end": "2025-04-02 12:20:04.713314", "item": "quadlet-basic.network", "rc": 0, "start": "2025-04-02 12:20:04.710669" } STDOUT: [Network] Subnet=192.168.29.0/24 Gateway=192.168.29.1 Label=app=wordpress NetworkName=quadlet-basic-name ok: [managed-node1] => (item=quadlet-basic-mysql.volume) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "cat", "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.volume" ], "delta": "0:00:00.002317", "end": "2025-04-02 12:20:05.117221", "item": "quadlet-basic-mysql.volume", "rc": 0, "start": "2025-04-02 12:20:05.114904" } STDOUT: # # Ansible managed # # system_role:podman [Volume] VolumeName=quadlet-basic-mysql-name TASK [Ensure linger] *********************************************************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:211 Wednesday 02 April 2025 12:20:05 -0400 (0:00:01.203) 0:02:19.553 ******* ok: [managed-node1] => { "changed": false, "failed_when_result": false, "stat": { "atime": 1743610762.2336547, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1743610762.2336547, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 50331851, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0644", "mtime": 1743610762.2336547, "nlink": 1, "path": "/var/lib/systemd/linger/user_quadlet_basic", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 0, "uid": 0, "version": "2419848977", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Cleanup user] ************************************************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:219 Wednesday 02 April 2025 12:20:05 -0400 (0:00:00.509) 0:02:20.063 ******* TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Wednesday 02 April 2025 12:20:05 -0400 (0:00:00.128) 0:02:20.192 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Wednesday 02 April 2025 12:20:05 -0400 (0:00:00.062) 0:02:20.254 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Wednesday 02 April 2025 
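For orientation, the quadlet-basic-mysql.container contents checked above line up with the __podman_quadlet_spec values logged earlier in this run ("Set per-container variables part 0"). Below is a minimal sketch of the kind of podman_quadlet_specs entry that would produce that file; the task wrapper and the exact variable layout are assumptions for illustration only and are not copied from tests_quadlet_basic.yml (the real test also defines the network and volume specs and the secrets referenced by PodmanArgs):

- name: Run the podman role with a quadlet container spec (illustrative only)
  include_role:
    name: fedora.linux_system_roles.podman
  vars:
    podman_run_as_user: user_quadlet_basic   # assumed: the rootless user seen in this log
    podman_quadlet_specs:
      - name: quadlet-basic-mysql             # rendered as quadlet-basic-mysql.container
        type: container
        Install:
          WantedBy: default.target
        Container:
          Image: quay.io/linux-system-roles/mysql:5.6
          ContainerName: quadlet-basic-mysql-name
          Volume: quadlet-basic-mysql.volume:/var/lib/mysql
          Network: quadlet-basic.network
          Environment:
            - FOO=/bin/busybox-extras
            - BAZ=test
          PodmanArgs: >-
            --secret=mysql_container_root_password,type=env,target=MYSQL_ROOT_PASSWORD
            --secret=json_secret,type=mount,target=/tmp/test.json

At service start, systemd's quadlet generator turns the rendered .container file into /run/user/1111/systemd/generator/quadlet-basic-mysql.service, which is the generated unit whose properties appear in the "Start service" result above.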
12:20:05 -0400 (0:00:00.046) 0:02:20.301 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Wednesday 02 April 2025 12:20:06 -0400 (0:00:00.044) 0:02:20.346 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Wednesday 02 April 2025 12:20:06 -0400 (0:00:00.058) 0:02:20.404 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Wednesday 02 April 2025 12:20:06 -0400 (0:00:00.062) 0:02:20.467 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Wednesday 02 April 2025 12:20:06 -0400 (0:00:00.058) 0:02:20.526 ******* ok: [managed-node1] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node1] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node1] => (item=CentOS_8.yml) => { "ansible_facts": { "__podman_packages": [ "crun", "podman", "podman-plugins", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } ok: [managed-node1] => (item=CentOS_8.yml) => { "ansible_facts": { "__podman_packages": [ "crun", "podman", "podman-plugins", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Wednesday 02 April 2025 12:20:06 -0400 (0:00:00.142) 0:02:20.668 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Wednesday 02 April 2025 12:20:08 -0400 (0:00:01.702) 0:02:22.370 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : 
Ensure required packages are installed] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Wednesday 02 April 2025 12:20:08 -0400 (0:00:00.048) 0:02:22.419 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Wednesday 02 April 2025 12:20:08 -0400 (0:00:00.052) 0:02:22.471 ******* skipping: [managed-node1] => {} TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Wednesday 02 April 2025 12:20:08 -0400 (0:00:00.036) 0:02:22.508 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Wednesday 02 April 2025 12:20:08 -0400 (0:00:00.038) 0:02:22.547 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Wednesday 02 April 2025 12:20:08 -0400 (0:00:00.036) 0:02:22.583 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.031436", "end": "2025-04-02 12:20:08.586712", "rc": 0, "start": "2025-04-02 12:20:08.555276" } STDOUT: podman version 4.9.4-dev TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Wednesday 02 April 2025 12:20:08 -0400 (0:00:00.410) 0:02:22.994 ******* ok: [managed-node1] => { "ansible_facts": { "podman_version": "4.9.4-dev" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Wednesday 02 April 2025 12:20:08 -0400 (0:00:00.038) 0:02:23.033 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Wednesday 02 April 2025 12:20:08 -0400 (0:00:00.037) 0:02:23.071 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } META: end_host conditional evaluated to false, continuing execution for managed-node1 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Wednesday 02 April 2025 12:20:08 -0400 (0:00:00.184) 0:02:23.255 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } META: end_host conditional evaluated to false, continuing execution for managed-node1 TASK 
[fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Wednesday 02 April 2025 12:20:09 -0400 (0:00:00.199) 0:02:23.455 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:20:09 -0400 (0:00:00.184) 0:02:23.640 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:20:09 -0400 (0:00:00.070) 0:02:23.710 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:20:09 -0400 (0:00:00.072) 0:02:23.782 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_group": "1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:20:09 -0400 (0:00:00.078) 0:02:23.861 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610609.2783134, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610533.3987217, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986653, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "4263604762", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:20:10 -0400 (0:00:00.509) 0:02:24.370 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_basic" ], "delta": "0:00:00.004648", "end": "2025-04-02 12:20:10.497977", "rc": 0, "start": "2025-04-02 12:20:10.493329" } STDOUT: 0: user_quadlet_basic 100000 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:20:10 -0400 (0:00:00.584) 0:02:24.955 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_basic" ], 
"delta": "0:00:00.004927", "end": "2025-04-02 12:20:11.085169", "rc": 0, "start": "2025-04-02 12:20:11.080242" } STDOUT: 0: user_quadlet_basic 100000 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:20:11 -0400 (0:00:00.583) 0:02:25.538 ******* ok: [managed-node1] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_basic": { "range": 65536, "start": 100000 } }, "podman_subuid_info": { "user_quadlet_basic": { "range": 65536, "start": 100000 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:20:11 -0400 (0:00:00.086) 0:02:25.625 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:20:11 -0400 (0:00:00.055) 0:02:25.680 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:20:11 -0400 (0:00:00.044) 0:02:25.724 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:20:11 -0400 (0:00:00.045) 0:02:25.770 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:20:11 -0400 (0:00:00.046) 0:02:25.816 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Wednesday 02 April 2025 12:20:11 -0400 (0:00:00.047) 0:02:25.864 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_container_conf_file": "/home/user_quadlet_basic/.config/containers/containers.conf.d/50-systemroles.conf", "__podman_parent_mode": "0700", "__podman_parent_path": "/home/user_quadlet_basic/.config/containers", "__podman_policy_json_file": "/home/user_quadlet_basic/.config/containers/policy.json", "__podman_registries_conf_file": "/home/user_quadlet_basic/.config/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/home/user_quadlet_basic/.config/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:126 Wednesday 02 April 2025 12:20:11 -0400 (0:00:00.049) 0:02:25.913 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Wednesday 02 April 2025 12:20:11 -0400 (0:00:00.121) 0:02:26.034 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Wednesday 02 April 2025 12:20:11 -0400 (0:00:00.054) 0:02:26.089 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:129 Wednesday 02 April 2025 12:20:11 -0400 (0:00:00.057) 0:02:26.146 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Wednesday 02 April 2025 12:20:11 -0400 (0:00:00.110) 0:02:26.257 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Wednesday 02 April 2025 12:20:11 -0400 (0:00:00.078) 0:02:26.335 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:132 Wednesday 02 April 2025 12:20:12 -0400 (0:00:00.054) 0:02:26.390 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:7 Wednesday 02 April 2025 12:20:12 -0400 (0:00:00.084) 0:02:26.474 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:15 Wednesday 02 April 2025 12:20:12 -0400 (0:00:00.050) 0:02:26.525 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:135 Wednesday 02 April 2025 12:20:12 -0400 (0:00:00.042) 0:02:26.567 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:8 Wednesday 02 April 2025 12:20:12 -0400 (0:00:00.070) 0:02:26.638 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:16 Wednesday 02 April 2025 12:20:12 -0400 (0:00:00.036) 0:02:26.674 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:21 Wednesday 02 April 2025 12:20:12 -0400 (0:00:00.035) 0:02:26.710 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:27 Wednesday 02 April 2025 12:20:12 -0400 (0:00:00.038) 0:02:26.748 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:141 Wednesday 02 April 2025 12:20:12 -0400 (0:00:00.035) 0:02:26.784 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:148 Wednesday 02 April 2025 12:20:12 -0400 (0:00:00.091) 0:02:26.875 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:155 Wednesday 02 April 2025 12:20:12 -0400 (0:00:00.037) 0:02:26.913 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:159 Wednesday 02 April 2025 12:20:12 -0400 (0:00:00.037) 0:02:26.950 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:168 Wednesday 02 April 2025 12:20:12 
-0400 (0:00:00.035) 0:02:26.986 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:177 Wednesday 02 April 2025 12:20:12 -0400 (0:00:00.034) 0:02:27.020 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node1 included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Wednesday 02 April 2025 12:20:12 -0400 (0:00:00.117) 0:02:27.138 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_user": "user_quadlet_basic" }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Wednesday 02 April 2025 12:20:12 -0400 (0:00:00.040) 0:02:27.178 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:20:12 -0400 (0:00:00.066) 0:02:27.245 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:20:12 -0400 (0:00:00.045) 0:02:27.291 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:20:12 -0400 (0:00:00.045) 0:02:27.337 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_group": "1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:20:13 -0400 (0:00:00.057) 0:02:27.394 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:20:13 -0400 (0:00:00.045) 0:02:27.439 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:20:13 -0400 (0:00:00.049) 0:02:27.489 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:20:13 -0400 (0:00:00.057) 0:02:27.547 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:20:13 -0400 (0:00:00.062) 0:02:27.609 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:20:13 -0400 (0:00:00.048) 0:02:27.658 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:20:13 -0400 (0:00:00.112) 0:02:27.771 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:20:13 -0400 (0:00:00.041) 0:02:27.812 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:20:13 -0400 (0:00:00.038) 0:02:27.850 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14 Wednesday 02 April 2025 12:20:13 -0400 (0:00:00.037) 0:02:27.888 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_rootless": true, "__podman_xdg_runtime_dir": "/run/user/1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20 Wednesday 02 April 2025 12:20:13 -0400 (0:00:00.046) 0:02:27.935 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Wednesday 02 April 
2025 12:20:13 -0400 (0:00:00.074) 0:02:28.009 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Wednesday 02 April 2025 12:20:13 -0400 (0:00:00.063) 0:02:28.073 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Wednesday 02 April 2025 12:20:13 -0400 (0:00:00.056) 0:02:28.130 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_cancel_user_linger": [ "user_quadlet_basic" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25 Wednesday 02 April 2025 12:20:13 -0400 (0:00:00.048) 0:02:28.179 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610762.2496548, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1743610803.6487248, "dev": 36, "device_type": 0, "executable": true, "exists": true, "gid": 1111, "gr_name": "user_quadlet_basic", "inode": 28283, "isblk": false, "ischr": false, "isdir": true, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/directory", "mode": "0700", "mtime": 1743610803.6487248, "nlink": 8, "path": "/run/user/1111", "pw_name": "user_quadlet_basic", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 200, "uid": 1111, "version": null, "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": true } } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41 Wednesday 02 April 2025 12:20:14 -0400 (0:00:00.398) 0:02:28.577 ******* [WARNING]: Using a variable for a task's 'args' is unsafe in some situations (see https://docs.ansible.com/ansible/devel/reference_appendices/faq.html#argsplat- unsafe) changed: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Wednesday 02 April 2025 12:20:14 -0400 (0:00:00.483) 0:02:29.060 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_user": "user_quadlet_basic" }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Wednesday 02 April 2025 12:20:14 -0400 (0:00:00.066) 0:02:29.127 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: 
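The censored "Manage each secret" result above creates a podman secret in the rootless user's store; the values come from the vaulted variables and are hidden by no_log. A rough manual equivalent with placeholder values (the secret names are taken from this run's quadlet spec; everything else is illustrative):

# Run as user_quadlet_basic so the secrets land in the rootless store
printf '%s' 'PLACEHOLDER_PASSWORD'  | podman secret create mysql_container_root_password -
printf '%s' '{"placeholder": true}' | podman secret create json_secret -
podman secret ls

# The container unit later consumes them via PodmanArgs, e.g.
#   --secret=mysql_container_root_password,type=env,target=MYSQL_ROOT_PASSWORD
#   --secret=json_secret,type=mount,target=/tmp/test.json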
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:20:14 -0400 (0:00:00.103) 0:02:29.230 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:20:15 -0400 (0:00:00.144) 0:02:29.375 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:20:15 -0400 (0:00:00.088) 0:02:29.463 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_group": "1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:20:15 -0400 (0:00:00.080) 0:02:29.544 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:20:15 -0400 (0:00:00.056) 0:02:29.600 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:20:15 -0400 (0:00:00.058) 0:02:29.659 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:20:15 -0400 (0:00:00.058) 0:02:29.718 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:20:15 -0400 (0:00:00.057) 0:02:29.775 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:20:15 -0400 (0:00:00.058) 0:02:29.834 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:20:15 -0400 (0:00:00.055) 0:02:29.890 ******* skipping: 
[managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:20:15 -0400 (0:00:00.059) 0:02:29.949 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:20:15 -0400 (0:00:00.043) 0:02:29.993 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14 Wednesday 02 April 2025 12:20:15 -0400 (0:00:00.048) 0:02:30.042 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_rootless": true, "__podman_xdg_runtime_dir": "/run/user/1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20 Wednesday 02 April 2025 12:20:15 -0400 (0:00:00.057) 0:02:30.100 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Wednesday 02 April 2025 12:20:15 -0400 (0:00:00.071) 0:02:30.171 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Wednesday 02 April 2025 12:20:15 -0400 (0:00:00.039) 0:02:30.211 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Wednesday 02 April 2025 12:20:15 -0400 (0:00:00.038) 0:02:30.249 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_cancel_user_linger": [ "user_quadlet_basic" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25 Wednesday 02 April 2025 12:20:15 -0400 (0:00:00.050) 0:02:30.300 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610762.2496548, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1743610803.6487248, "dev": 36, "device_type": 0, "executable": true, "exists": true, "gid": 1111, "gr_name": "user_quadlet_basic", "inode": 28283, "isblk": false, "ischr": false, "isdir": true, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": 
"inode/directory", "mode": "0700", "mtime": 1743610803.6487248, "nlink": 8, "path": "/run/user/1111", "pw_name": "user_quadlet_basic", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 200, "uid": 1111, "version": null, "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": true } } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41 Wednesday 02 April 2025 12:20:16 -0400 (0:00:00.482) 0:02:30.782 ******* [WARNING]: Using a variable for a task's 'args' is unsafe in some situations (see https://docs.ansible.com/ansible/devel/reference_appendices/faq.html#argsplat- unsafe) changed: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:184 Wednesday 02 April 2025 12:20:16 -0400 (0:00:00.472) 0:02:31.255 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:191 Wednesday 02 April 2025 12:20:16 -0400 (0:00:00.034) 0:02:31.290 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Wednesday 02 April 2025 12:20:17 -0400 (0:00:00.149) 0:02:31.439 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Container": { "ContainerName": "quadlet-basic-mysql-name", "Environment": [ "FOO=/bin/busybox-extras", "BAZ=test" ], "Image": "quay.io/linux-system-roles/mysql:5.6", "Network": "quadlet-basic.network", "PodmanArgs": "--secret=mysql_container_root_password,type=env,target=MYSQL_ROOT_PASSWORD --secret=json_secret,type=mount,target=/tmp/test.json", "Volume": "quadlet-basic-mysql.volume:/var/lib/mysql" }, "Install": { "WantedBy": "default.target" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Wednesday 02 April 2025 12:20:17 -0400 (0:00:00.095) 0:02:31.535 ******* ok: [managed-node1] => { 
"ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_basic" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Wednesday 02 April 2025 12:20:17 -0400 (0:00:00.071) 0:02:31.606 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Wednesday 02 April 2025 12:20:17 -0400 (0:00:00.051) 0:02:31.658 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-basic-mysql", "__podman_quadlet_type": "container", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Wednesday 02 April 2025 12:20:17 -0400 (0:00:00.065) 0:02:31.723 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:20:17 -0400 (0:00:00.080) 0:02:31.803 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:20:17 -0400 (0:00:00.045) 0:02:31.848 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:20:17 -0400 (0:00:00.044) 0:02:31.892 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_group": "1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:20:17 -0400 (0:00:00.051) 0:02:31.944 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610609.2783134, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610533.3987217, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986653, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, 
"rusr": true, "size": 12640, "uid": 0, "version": "4263604762", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:20:18 -0400 (0:00:00.444) 0:02:32.389 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_basic" ], "delta": "0:00:00.003378", "end": "2025-04-02 12:20:18.388412", "rc": 0, "start": "2025-04-02 12:20:18.385034" } STDOUT: 0: user_quadlet_basic 100000 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:20:18 -0400 (0:00:00.404) 0:02:32.793 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_basic" ], "delta": "0:00:00.003732", "end": "2025-04-02 12:20:18.764447", "rc": 0, "start": "2025-04-02 12:20:18.760715" } STDOUT: 0: user_quadlet_basic 100000 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:20:18 -0400 (0:00:00.385) 0:02:33.178 ******* ok: [managed-node1] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_basic": { "range": 65536, "start": 100000 } }, "podman_subuid_info": { "user_quadlet_basic": { "range": 65536, "start": 100000 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:20:18 -0400 (0:00:00.057) 0:02:33.236 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:20:18 -0400 (0:00:00.038) 0:02:33.275 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:20:18 -0400 (0:00:00.040) 0:02:33.315 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:20:19 -0400 (0:00:00.048) 0:02:33.364 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:20:19 -0400 (0:00:00.062) 0:02:33.426 ******* skipping: [managed-node1] => { "changed": false, 
"skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Wednesday 02 April 2025 12:20:19 -0400 (0:00:00.064) 0:02:33.490 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [ "quay.io/linux-system-roles/mysql:5.6" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-basic-mysql.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_basic", "__podman_xdg_runtime_dir": "/run/user/1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Wednesday 02 April 2025 12:20:19 -0400 (0:00:00.103) 0:02:33.593 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_basic/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Wednesday 02 April 2025 12:20:19 -0400 (0:00:00.067) 0:02:33.661 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Wednesday 02 April 2025 12:20:19 -0400 (0:00:00.058) 0:02:33.720 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_images": [ "quay.io/linux-system-roles/mysql:5.6" ], "__podman_quadlet_file": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.container", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Wednesday 02 April 2025 12:20:19 -0400 (0:00:00.138) 0:02:33.858 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Wednesday 02 April 2025 12:20:19 -0400 (0:00:00.064) 0:02:33.923 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Wednesday 02 April 2025 12:20:19 -0400 (0:00:00.164) 0:02:34.087 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610762.2496548, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1743610803.6487248, "dev": 36, "device_type": 0, "executable": true, "exists": true, "gid": 1111, "gr_name": "user_quadlet_basic", "inode": 28283, "isblk": false, "ischr": false, "isdir": 
true, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/directory", "mode": "0700", "mtime": 1743610803.6487248, "nlink": 8, "path": "/run/user/1111", "pw_name": "user_quadlet_basic", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 200, "uid": 1111, "version": null, "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": true } } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Wednesday 02 April 2025 12:20:20 -0400 (0:00:00.383) 0:02:34.471 ******* changed: [managed-node1] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-basic-mysql.service", "state": "stopped", "status": { "ActiveEnterTimestamp": "Wed 2025-04-02 12:20:03 EDT", "ActiveEnterTimestampMonotonic": "73754230", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "quadlet-basic-network.service -.mount -.slice quadlet-basic-mysql-volume.service basic.target run-user-1111.mount", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "yes", "AssertTimestamp": "Wed 2025-04-02 12:20:03 EDT", "AssertTimestampMonotonic": "73248231", "Before": "default.target shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Wed 2025-04-02 12:20:03 EDT", "ConditionTimestampMonotonic": "73248229", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/user.slice/user-1111.slice/user@1111.service/quadlet-basic-mysql.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "yes", "DelegateControllers": "cpu cpuacct cpuset io blkio memory devices pids", "Description": "quadlet-basic-mysql.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-basic-mysql.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "8786", "ExecMainStartTimestamp": "Wed 2025-04-02 12:20:03 EDT", "ExecMainStartTimestampMonotonic": "73754208", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name=quadlet-basic-mysql-name 
--cidfile=/run/user/1111/quadlet-basic-mysql.cid --replace --rm --cgroups=split --network=quadlet-basic-name --sdnotify=conmon -d -v quadlet-basic-mysql-name:/var/lib/mysql --env BAZ=test --env FOO=/bin/busybox-extras --secret=mysql_container_root_password,type=env,target=MYSQL_ROOT_PASSWORD --secret=json_secret,type=mount,target=/tmp/test.json quay.io/linux-system-roles/mysql:5.6 ; ignore_errors=no ; start_time=[Wed 2025-04-02 12:20:03 EDT] ; stop_time=[n/a] ; pid=8624 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/1111/quadlet-basic-mysql.cid ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/1111/quadlet-basic-mysql.cid ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/run/user/1111/systemd/generator/quadlet-basic-mysql.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "quadlet-basic-mysql.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Wed 2025-04-02 12:20:03 EDT", "InactiveExitTimestampMonotonic": "73248887", "InvocationID": "c1bc1b1e250e40298a969d23ba11bbfa", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "inherit", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "8786", "MemoryAccounting": "yes", "MemoryCurrent": "611393536", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "quadlet-basic-mysql.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", 
"PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "-.slice quadlet-basic-mysql-volume.service basic.target quadlet-basic-network.service", "RequiresMountsFor": "/run/user/1111/containers /home/user_quadlet_basic", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "-.slice", "SourcePath": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.container", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Wed 2025-04-02 12:20:03 EDT", "StateChangeTimestampMonotonic": "73754230", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-basic-mysql", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "23", "TasksMax": "22405", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "generated", "UtmpMode": "init", "WantedBy": "default.target", "WatchdogTimestamp": "Wed 2025-04-02 12:20:03 EDT", "WatchdogTimestampMonotonic": "73754227", "WatchdogUSec": "0", "WorkingDirectory": "!/home/user_quadlet_basic" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Wednesday 02 April 2025 12:20:22 -0400 (0:00:02.714) 0:02:37.185 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610802.6747198, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "0b6cac7929623f1059e78ef39b8b0a25169b28a6", "ctime": 1743610802.1487172, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 1111, "gr_name": "user_quadlet_basic", "inode": 499122333, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1743610801.8687158, "nlink": 1, "path": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.container", "pw_name": "user_quadlet_basic", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 448, "uid": 1111, "version": "1275006988", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] 
******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Wednesday 02 April 2025 12:20:23 -0400 (0:00:00.420) 0:02:37.606 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Wednesday 02 April 2025 12:20:23 -0400 (0:00:00.066) 0:02:37.672 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Wednesday 02 April 2025 12:20:23 -0400 (0:00:00.360) 0:02:38.033 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Wednesday 02 April 2025 12:20:23 -0400 (0:00:00.056) 0:02:38.089 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Wednesday 02 April 2025 12:20:23 -0400 (0:00:00.093) 0:02:38.182 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Wednesday 02 April 2025 12:20:23 -0400 (0:00:00.035) 0:02:38.218 ******* changed: [managed-node1] => { "changed": true, "path": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.container", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Wednesday 02 April 2025 12:20:24 -0400 (0:00:00.369) 0:02:38.587 ******* ok: [managed-node1] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Wednesday 02 April 2025 12:20:24 -0400 (0:00:00.588) 0:02:39.176 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Wednesday 02 April 2025 12:20:25 -0400 (0:00:00.519) 0:02:39.696 ******* skipping: [managed-node1] => { 
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Wednesday 02 April 2025 12:20:25 -0400 (0:00:00.085) 0:02:39.781 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Wednesday 02 April 2025 12:20:25 -0400 (0:00:00.065) 0:02:39.847 ******* changed: [managed-node1] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.231174", "end": "2025-04-02 12:20:26.112620", "rc": 0, "start": "2025-04-02 12:20:25.881446" } STDOUT: dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Wednesday 02 April 2025 12:20:26 -0400 (0:00:00.679) 0:02:40.526 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Wednesday 02 April 2025 12:20:26 -0400 (0:00:00.106) 0:02:40.633 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Wednesday 02 April 2025 12:20:26 -0400 (0:00:00.060) 0:02:40.694 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Wednesday 02 April 2025 12:20:26 -0400 (0:00:00.059) 0:02:40.753 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_cancel_user_linger": [ "user_quadlet_basic" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Wednesday 02 April 2025 12:20:26 -0400 (0:00:00.144) 0:02:40.898 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.042642", "end": "2025-04-02 12:20:26.928378", "rc": 0, "start": "2025-04-02 12:20:26.885736" } TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Wednesday 02 April 2025 12:20:27 -0400 (0:00:00.464) 0:02:41.363 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.041670", "end": "2025-04-02 12:20:27.431806", "rc": 0, "start": 
"2025-04-02 12:20:27.390136" } STDOUT: local quadlet-basic-mysql-name local systemd-quadlet-basic-unused-volume TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Wednesday 02 April 2025 12:20:27 -0400 (0:00:00.500) 0:02:41.863 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.039630", "end": "2025-04-02 12:20:27.922316", "rc": 0, "start": "2025-04-02 12:20:27.882686" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Wednesday 02 April 2025 12:20:28 -0400 (0:00:00.490) 0:02:42.354 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.067880", "end": "2025-04-02 12:20:28.443453", "rc": 0, "start": "2025-04-02 12:20:28.375573" } STDOUT: podman quadlet-basic-name systemd-quadlet-basic-unused-network TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Wednesday 02 April 2025 12:20:28 -0400 (0:00:00.521) 0:02:42.875 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Wednesday 02 April 2025 12:20:29 -0400 (0:00:00.498) 0:02:43.373 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Wednesday 02 April 2025 12:20:29 -0400 (0:00:00.494) 0:02:43.868 ******* ok: [managed-node1] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "certmonger.service": { "name": "certmonger.service", "source": "systemd", "state": "running", "status": "enabled" }, "chrony-dnssrv@.service": { "name": "chrony-dnssrv@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", 
"source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cni-dhcp.service": { "name": "cni-dhcp.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "static" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus-org.freedesktop.portable1.service": { "name": "dbus-org.freedesktop.portable1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "running", "status": "static" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dnsmasq.service": { "name": "dnsmasq.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", 
"source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "unknown" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "halt-local.service": { "name": "halt-local.service", "source": "systemd", "state": "inactive", "status": "static" }, "import-state.service": { "name": "import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "iprdump.service": { "name": "iprdump.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprinit.service": { "name": "iprinit.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprupdate.service": { "name": "iprupdate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "loadmodules.service": { "name": "loadmodules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": 
"man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "messagebus.service": { "name": "messagebus.service", "source": "systemd", "state": "active", "status": "static" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-convert.service": { "name": "nfs-convert.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-halt.service": { "name": "plymouth-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-kexec.service": { "name": "plymouth-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-poweroff.service": { "name": "plymouth-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-quit.service": { "name": "plymouth-quit.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-read-write.service": { "name": "plymouth-read-write.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-reboot.service": { "name": "plymouth-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-switch-root-initramfs.service": { "name": "plymouth-switch-root-initramfs.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-switch-root.service": { "name": "plymouth-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": 
"running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": 
"active", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-plymouth.service": { "name": "systemd-ask-password-plymouth.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-coredump@0.service": { "name": "systemd-coredump@0.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-portabled.service": { "name": "systemd-portabled.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-resolved.service": { "name": "systemd-resolved.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "masked" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { 
"name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "tcsd.service": { "name": "tcsd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "timedatex.service": { "name": "timedatex.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "tuned.service": { "name": "tuned.service", "source": "systemd", "state": "running", "status": "enabled" }, "unbound-anchor.service": { "name": "unbound-anchor.service", "source": "systemd", "state": "stopped", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "user-runtime-dir@1111.service": { "name": "user-runtime-dir@1111.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "unknown" }, "user@1111.service": { "name": "user@1111.service", "source": "systemd", "state": "running", "status": "unknown" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Wednesday 02 April 2025 12:20:31 -0400 (0:00:01.975) 0:02:45.843 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Wednesday 02 April 2025 12:20:31 -0400 (0:00:00.061) 0:02:45.905 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Volume": {} }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Wednesday 02 April 2025 12:20:31 -0400 (0:00:00.079) 0:02:45.985 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_basic" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Wednesday 02 April 2025 12:20:31 -0400 (0:00:00.089) 0:02:46.074 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Wednesday 02 April 2025 12:20:31 -0400 (0:00:00.060) 0:02:46.134 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_name": 
"quadlet-basic-unused-volume", "__podman_quadlet_type": "volume", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Wednesday 02 April 2025 12:20:31 -0400 (0:00:00.084) 0:02:46.218 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:20:31 -0400 (0:00:00.118) 0:02:46.337 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:20:32 -0400 (0:00:00.082) 0:02:46.419 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:20:32 -0400 (0:00:00.075) 0:02:46.494 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_group": "1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:20:32 -0400 (0:00:00.081) 0:02:46.576 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610609.2783134, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610533.3987217, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986653, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "4263604762", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:20:32 -0400 (0:00:00.418) 0:02:46.995 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_basic" ], "delta": "0:00:00.003436", "end": "2025-04-02 12:20:33.004894", "rc": 0, "start": "2025-04-02 12:20:33.001458" } STDOUT: 0: user_quadlet_basic 100000 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:20:33 
-0400 (0:00:00.419) 0:02:47.415 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_basic" ], "delta": "0:00:00.003999", "end": "2025-04-02 12:20:33.401145", "rc": 0, "start": "2025-04-02 12:20:33.397146" } STDOUT: 0: user_quadlet_basic 100000 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:20:33 -0400 (0:00:00.474) 0:02:47.889 ******* ok: [managed-node1] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_basic": { "range": 65536, "start": 100000 } }, "podman_subuid_info": { "user_quadlet_basic": { "range": 65536, "start": 100000 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:20:33 -0400 (0:00:00.060) 0:02:47.949 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:20:33 -0400 (0:00:00.055) 0:02:48.004 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:20:33 -0400 (0:00:00.068) 0:02:48.073 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:20:33 -0400 (0:00:00.079) 0:02:48.153 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:20:33 -0400 (0:00:00.055) 0:02:48.208 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Wednesday 02 April 2025 12:20:33 -0400 (0:00:00.051) 0:02:48.260 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-basic-unused-volume-volume.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_basic", "__podman_xdg_runtime_dir": "/run/user/1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Wednesday 02 April 2025 12:20:34 
-0400 (0:00:00.086) 0:02:48.346 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_basic/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Wednesday 02 April 2025 12:20:34 -0400 (0:00:00.047) 0:02:48.394 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Wednesday 02 April 2025 12:20:34 -0400 (0:00:00.041) 0:02:48.435 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-volume.volume", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Wednesday 02 April 2025 12:20:34 -0400 (0:00:00.101) 0:02:48.536 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Wednesday 02 April 2025 12:20:34 -0400 (0:00:00.062) 0:02:48.599 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Wednesday 02 April 2025 12:20:34 -0400 (0:00:00.144) 0:02:48.743 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610762.2496548, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1743610822.6816459, "dev": 36, "device_type": 0, "executable": true, "exists": true, "gid": 1111, "gr_name": "user_quadlet_basic", "inode": 28283, "isblk": false, "ischr": false, "isdir": true, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/directory", "mode": "0700", "mtime": 1743610822.6816459, "nlink": 8, "path": "/run/user/1111", "pw_name": "user_quadlet_basic", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 180, "uid": 1111, "version": null, "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": true } } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Wednesday 02 April 2025 12:20:34 -0400 (0:00:00.466) 0:02:49.210 ******* changed: [managed-node1] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-basic-unused-volume-volume.service", "state": "stopped", "status": { "ActiveEnterTimestamp": "Wed 2025-04-02 12:19:50 EDT", 
"ActiveEnterTimestampMonotonic": "60295540", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "-.slice run-user-1111.mount -.mount basic.target", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "yes", "AssertTimestamp": "Wed 2025-04-02 12:19:50 EDT", "AssertTimestampMonotonic": "60249874", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Wed 2025-04-02 12:19:50 EDT", "ConditionTimestampMonotonic": "60249872", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/user.slice/user-1111.slice/user@1111.service/quadlet-basic-unused-volume-volume.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "quadlet-basic-unused-volume-volume.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "ExecMainCode": "1", "ExecMainExitTimestamp": "Wed 2025-04-02 12:19:50 EDT", "ExecMainExitTimestampMonotonic": "60295351", "ExecMainPID": "7291", "ExecMainStartTimestamp": "Wed 2025-04-02 12:19:50 EDT", "ExecMainStartTimestampMonotonic": "60250550", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-basic-unused-volume ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/run/user/1111/systemd/generator/quadlet-basic-unused-volume-volume.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "quadlet-basic-unused-volume-volume.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Wed 2025-04-02 12:19:50 EDT", "InactiveExitTimestampMonotonic": "60250590", "InvocationID": "7fa96302271b48b88235c882a6b61f53", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "inherit", "KillMode": "control-group", "KillSignal": 
"15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "MemoryAccounting": "yes", "MemoryCurrent": "0", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "quadlet-basic-unused-volume-volume.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "basic.target -.slice", "RequiresMountsFor": "/run/user/1111/containers /home/user_quadlet_basic", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "-.slice", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Wed 2025-04-02 12:19:50 EDT", "StateChangeTimestampMonotonic": "60295540", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "exited", "SuccessAction": "none", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-basic-unused-volume-volume", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "0", "TasksMax": "22405", "TimeoutStartUSec": "infinity", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "generated", 
"UtmpMode": "init", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0", "WorkingDirectory": "!/home/user_quadlet_basic" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Wednesday 02 April 2025 12:20:35 -0400 (0:00:00.609) 0:02:49.820 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610789.690653, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "fd0ae560360afa5541b866560b1e849d25e216ef", "ctime": 1743610789.1326501, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 1111, "gr_name": "user_quadlet_basic", "inode": 320864387, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1743610788.8626487, "nlink": 1, "path": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-volume.volume", "pw_name": "user_quadlet_basic", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 53, "uid": 1111, "version": "523155064", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Wednesday 02 April 2025 12:20:35 -0400 (0:00:00.406) 0:02:50.227 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Wednesday 02 April 2025 12:20:35 -0400 (0:00:00.091) 0:02:50.318 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Wednesday 02 April 2025 12:20:36 -0400 (0:00:00.404) 0:02:50.723 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Wednesday 02 April 2025 12:20:36 -0400 (0:00:00.089) 0:02:50.813 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Wednesday 02 April 2025 12:20:36 -0400 (0:00:00.068) 0:02:50.881 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Wednesday 02 April 2025 12:20:36 -0400 (0:00:00.066) 0:02:50.948 ******* changed: [managed-node1] => { "changed": true, "path": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-volume.volume", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Wednesday 02 April 2025 12:20:37 -0400 (0:00:00.438) 0:02:51.387 ******* ok: [managed-node1] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Wednesday 02 April 2025 12:20:37 -0400 (0:00:00.589) 0:02:51.976 ******* changed: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Wednesday 02 April 2025 12:20:38 -0400 (0:00:00.507) 0:02:52.484 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Wednesday 02 April 2025 12:20:38 -0400 (0:00:00.058) 0:02:52.543 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Wednesday 02 April 2025 12:20:38 -0400 (0:00:00.040) 0:02:52.583 ******* changed: [managed-node1] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.042803", "end": "2025-04-02 12:20:38.634639", "rc": 0, "start": "2025-04-02 12:20:38.591836" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Wednesday 02 April 2025 12:20:38 -0400 (0:00:00.470) 0:02:53.054 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Wednesday 02 April 2025 12:20:38 -0400 (0:00:00.114) 0:02:53.169 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Wednesday 02 April 2025 12:20:38 -0400 (0:00:00.068) 0:02:53.237 ******* skipping: [managed-node1] => { "changed": 
false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Wednesday 02 April 2025 12:20:38 -0400 (0:00:00.047) 0:02:53.284 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_cancel_user_linger": [ "user_quadlet_basic" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Wednesday 02 April 2025 12:20:39 -0400 (0:00:00.074) 0:02:53.359 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.041659", "end": "2025-04-02 12:20:39.392342", "rc": 0, "start": "2025-04-02 12:20:39.350683" } TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Wednesday 02 April 2025 12:20:39 -0400 (0:00:00.475) 0:02:53.834 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.041710", "end": "2025-04-02 12:20:39.911282", "rc": 0, "start": "2025-04-02 12:20:39.869572" } STDOUT: local quadlet-basic-mysql-name TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Wednesday 02 April 2025 12:20:40 -0400 (0:00:00.516) 0:02:54.350 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.040042", "end": "2025-04-02 12:20:40.442505", "rc": 0, "start": "2025-04-02 12:20:40.402463" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Wednesday 02 April 2025 12:20:40 -0400 (0:00:00.511) 0:02:54.861 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.070782", "end": "2025-04-02 12:20:40.929604", "rc": 0, "start": "2025-04-02 12:20:40.858822" } STDOUT: podman quadlet-basic-name systemd-quadlet-basic-unused-network TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Wednesday 02 April 2025 12:20:41 -0400 (0:00:00.504) 0:02:55.365 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Wednesday 02 April 2025 12:20:41 -0400 (0:00:00.512) 0:02:55.878 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Wednesday 02 April 2025 12:20:42 -0400 (0:00:00.479) 0:02:56.357 ******* ok: [managed-node1] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "certmonger.service": { "name": "certmonger.service", "source": "systemd", "state": "running", "status": "enabled" }, "chrony-dnssrv@.service": { "name": "chrony-dnssrv@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cni-dhcp.service": { "name": "cni-dhcp.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "static" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus-org.freedesktop.portable1.service": { "name": "dbus-org.freedesktop.portable1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.timedate1.service": { 
"name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "running", "status": "static" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dnsmasq.service": { "name": "dnsmasq.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "unknown" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "halt-local.service": { "name": "halt-local.service", "source": "systemd", "state": "inactive", "status": "static" }, "import-state.service": { "name": "import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": 
"systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "iprdump.service": { "name": "iprdump.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprinit.service": { "name": "iprinit.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprupdate.service": { "name": "iprupdate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "loadmodules.service": { "name": "loadmodules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "messagebus.service": { "name": "messagebus.service", "source": "systemd", "state": "active", "status": "static" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-convert.service": { "name": "nfs-convert.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-halt.service": { "name": "plymouth-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-kexec.service": { "name": "plymouth-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-poweroff.service": { "name": "plymouth-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-quit.service": { "name": "plymouth-quit.service", "source": "systemd", 
"state": "stopped", "status": "static" }, "plymouth-read-write.service": { "name": "plymouth-read-write.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-reboot.service": { "name": "plymouth-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-switch-root-initramfs.service": { "name": "plymouth-switch-root-initramfs.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-switch-root.service": { "name": "plymouth-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@ed25519.service": { 
"name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "active", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-plymouth.service": { "name": "systemd-ask-password-plymouth.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-coredump@0.service": { "name": "systemd-coredump@0.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": 
"systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-portabled.service": { "name": "systemd-portabled.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-resolved.service": { "name": "systemd-resolved.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "masked" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": 
"static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "tcsd.service": { "name": "tcsd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "timedatex.service": { "name": "timedatex.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "tuned.service": { "name": "tuned.service", "source": "systemd", "state": "running", "status": "enabled" }, "unbound-anchor.service": { "name": "unbound-anchor.service", "source": "systemd", "state": "stopped", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "user-runtime-dir@1111.service": { "name": "user-runtime-dir@1111.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "unknown" }, "user@1111.service": { "name": "user@1111.service", "source": "systemd", "state": "running", "status": "unknown" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Wednesday 02 April 2025 12:20:43 -0400 (0:00:01.684) 0:02:58.042 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Wednesday 02 April 2025 12:20:43 -0400 (0:00:00.134) 0:02:58.176 ******* ok: [managed-node1] => { 
"ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Volume": { "VolumeName": "quadlet-basic-mysql-name" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Wednesday 02 April 2025 12:20:43 -0400 (0:00:00.077) 0:02:58.254 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_basic" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Wednesday 02 April 2025 12:20:43 -0400 (0:00:00.080) 0:02:58.334 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Wednesday 02 April 2025 12:20:44 -0400 (0:00:00.065) 0:02:58.400 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-basic-mysql", "__podman_quadlet_type": "volume", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Wednesday 02 April 2025 12:20:44 -0400 (0:00:00.091) 0:02:58.492 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:20:44 -0400 (0:00:00.131) 0:02:58.623 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:20:44 -0400 (0:00:00.078) 0:02:58.701 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:20:44 -0400 (0:00:00.083) 0:02:58.785 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_group": "1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:20:44 -0400 (0:00:00.087) 0:02:58.872 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610609.2783134, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": 
"bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610533.3987217, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986653, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "4263604762", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:20:44 -0400 (0:00:00.404) 0:02:59.277 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_basic" ], "delta": "0:00:00.003450", "end": "2025-04-02 12:20:45.275871", "rc": 0, "start": "2025-04-02 12:20:45.272421" } STDOUT: 0: user_quadlet_basic 100000 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:20:45 -0400 (0:00:00.410) 0:02:59.687 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_basic" ], "delta": "0:00:00.003385", "end": "2025-04-02 12:20:45.680182", "rc": 0, "start": "2025-04-02 12:20:45.676797" } STDOUT: 0: user_quadlet_basic 100000 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:20:45 -0400 (0:00:00.428) 0:03:00.116 ******* ok: [managed-node1] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_basic": { "range": 65536, "start": 100000 } }, "podman_subuid_info": { "user_quadlet_basic": { "range": 65536, "start": 100000 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:20:45 -0400 (0:00:00.178) 0:03:00.295 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:20:46 -0400 (0:00:00.066) 0:03:00.362 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:20:46 -0400 (0:00:00.070) 0:03:00.432 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:20:46 -0400 (0:00:00.070) 0:03:00.502 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:20:46 -0400 (0:00:00.064) 0:03:00.566 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Wednesday 02 April 2025 12:20:46 -0400 (0:00:00.060) 0:03:00.626 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-basic-mysql-volume.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_basic", "__podman_xdg_runtime_dir": "/run/user/1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Wednesday 02 April 2025 12:20:46 -0400 (0:00:00.089) 0:03:00.715 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_basic/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Wednesday 02 April 2025 12:20:46 -0400 (0:00:00.051) 0:03:00.767 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Wednesday 02 April 2025 12:20:46 -0400 (0:00:00.040) 0:03:00.808 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.volume", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Wednesday 02 April 2025 12:20:46 -0400 (0:00:00.095) 0:03:00.903 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Wednesday 02 April 2025 12:20:46 -0400 (0:00:00.053) 0:03:00.957 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: 
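
For context, the unit being cleaned up here is the rootless quadlet volume whose spec was parsed above ({"Volume": {"VolumeName": "quadlet-basic-mysql-name"}}). A plausible minimal reconstruction of that .volume file, written as a hedged Ansible sketch (the real file is generated by the role and may carry additional header content), would be:

# Illustrative sketch only - not the role's template; recreates a minimal
# quadlet .volume unit matching the spec shown above.
- hosts: managed-node1
  gather_facts: false
  tasks:
    - name: Write the quadlet volume unit for the rootless user
      copy:
        dest: /home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.volume
        owner: user_quadlet_basic
        group: "1111"
        mode: "0644"
        content: |
          [Volume]
          VolumeName=quadlet-basic-mysql-name
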
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Wednesday 02 April 2025 12:20:46 -0400 (0:00:00.128) 0:03:01.085 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610762.2496548, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1743610822.6816459, "dev": 36, "device_type": 0, "executable": true, "exists": true, "gid": 1111, "gr_name": "user_quadlet_basic", "inode": 28283, "isblk": false, "ischr": false, "isdir": true, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/directory", "mode": "0700", "mtime": 1743610822.6816459, "nlink": 8, "path": "/run/user/1111", "pw_name": "user_quadlet_basic", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 180, "uid": 1111, "version": null, "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": true } } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Wednesday 02 April 2025 12:20:47 -0400 (0:00:00.413) 0:03:01.499 ******* changed: [managed-node1] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-basic-mysql-volume.service", "state": "stopped", "status": { "ActiveEnterTimestamp": "Wed 2025-04-02 12:19:44 EDT", "ActiveEnterTimestampMonotonic": "54588170", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "run-user-1111.mount -.slice basic.target -.mount", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "yes", "AssertTimestamp": "Wed 2025-04-02 12:19:44 EDT", "AssertTimestampMonotonic": "54539742", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Wed 2025-04-02 12:19:44 EDT", "ConditionTimestampMonotonic": "54539741", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/user.slice/user-1111.slice/user@1111.service/quadlet-basic-mysql-volume.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "quadlet-basic-mysql-volume.service", "DevicePolicy": "auto", "DynamicUser": "no", 
"EffectiveCPUs": "", "EffectiveMemoryNodes": "", "ExecMainCode": "1", "ExecMainExitTimestamp": "Wed 2025-04-02 12:19:44 EDT", "ExecMainExitTimestampMonotonic": "54587250", "ExecMainPID": "6174", "ExecMainStartTimestamp": "Wed 2025-04-02 12:19:44 EDT", "ExecMainStartTimestampMonotonic": "54540399", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore quadlet-basic-mysql-name ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/run/user/1111/systemd/generator/quadlet-basic-mysql-volume.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "quadlet-basic-mysql-volume.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Wed 2025-04-02 12:19:44 EDT", "InactiveExitTimestampMonotonic": "54540434", "InvocationID": "79a8b612624043a484d6ee730f4eeff6", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "inherit", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "MemoryAccounting": "yes", "MemoryCurrent": "0", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "quadlet-basic-mysql-volume.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "-.slice basic.target", "RequiresMountsFor": "/run/user/1111/containers 
/home/user_quadlet_basic", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "-.slice", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Wed 2025-04-02 12:19:44 EDT", "StateChangeTimestampMonotonic": "54588170", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "exited", "SuccessAction": "none", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-basic-mysql-volume", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "0", "TasksMax": "22405", "TimeoutStartUSec": "infinity", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "generated", "UtmpMode": "init", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0", "WorkingDirectory": "!/home/user_quadlet_basic" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Wednesday 02 April 2025 12:20:47 -0400 (0:00:00.633) 0:03:02.133 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610783.9556422, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "90a3571bfc7670328fe3f8fb625585613dbd9c4a", "ctime": 1743610783.4406426, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 1111, "gr_name": "user_quadlet_basic", "inode": 274727108, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1743610783.1706429, "nlink": 1, "path": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.volume", "pw_name": "user_quadlet_basic", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 89, "uid": 1111, "version": "3512008263", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Wednesday 02 April 2025 12:20:48 -0400 (0:00:00.379) 0:03:02.512 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Wednesday 02 April 2025 12:20:48 -0400 (0:00:00.071) 0:03:02.584 ******* ok: 
[managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Wednesday 02 April 2025 12:20:48 -0400 (0:00:00.433) 0:03:03.018 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Wednesday 02 April 2025 12:20:48 -0400 (0:00:00.089) 0:03:03.107 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Wednesday 02 April 2025 12:20:48 -0400 (0:00:00.066) 0:03:03.174 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Wednesday 02 April 2025 12:20:48 -0400 (0:00:00.064) 0:03:03.239 ******* changed: [managed-node1] => { "changed": true, "path": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.volume", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Wednesday 02 April 2025 12:20:49 -0400 (0:00:00.438) 0:03:03.677 ******* ok: [managed-node1] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Wednesday 02 April 2025 12:20:49 -0400 (0:00:00.552) 0:03:04.230 ******* changed: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Wednesday 02 April 2025 12:20:50 -0400 (0:00:00.527) 0:03:04.757 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Wednesday 02 April 2025 12:20:50 -0400 (0:00:00.087) 0:03:04.844 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Wednesday 02 April 2025 12:20:50 -0400 (0:00:00.050) 0:03:04.895 ******* changed: [managed-node1] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.042700", "end": "2025-04-02 12:20:50.923772", "rc": 0, "start": "2025-04-02 12:20:50.881072" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Wednesday 02 April 2025 12:20:51 -0400 (0:00:00.449) 0:03:05.344 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Wednesday 02 April 2025 12:20:51 -0400 (0:00:00.124) 0:03:05.469 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Wednesday 02 April 2025 12:20:51 -0400 (0:00:00.055) 0:03:05.525 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Wednesday 02 April 2025 12:20:51 -0400 (0:00:00.052) 0:03:05.577 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_cancel_user_linger": [ "user_quadlet_basic" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Wednesday 02 April 2025 12:20:51 -0400 (0:00:00.051) 0:03:05.628 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.042325", "end": "2025-04-02 12:20:51.656237", "rc": 0, "start": "2025-04-02 12:20:51.613912" } TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Wednesday 02 April 2025 12:20:51 -0400 (0:00:00.440) 0:03:06.068 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.043398", "end": "2025-04-02 12:20:52.091326", "rc": 0, "start": "2025-04-02 12:20:52.047928" } TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Wednesday 02 April 2025 12:20:52 -0400 (0:00:00.434) 0:03:06.503 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.042271", "end": "2025-04-02 12:20:52.539496", "rc": 0, "start": "2025-04-02 12:20:52.497225" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Wednesday 02 April 2025 12:20:52 -0400 (0:00:00.449) 0:03:06.953 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.065093", "end": "2025-04-02 12:20:53.007524", "rc": 0, "start": "2025-04-02 12:20:52.942431" } STDOUT: podman quadlet-basic-name systemd-quadlet-basic-unused-network TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Wednesday 02 April 2025 12:20:53 -0400 (0:00:00.469) 0:03:07.423 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Wednesday 02 April 2025 12:20:53 -0400 (0:00:00.434) 0:03:07.857 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Wednesday 02 April 2025 12:20:53 -0400 (0:00:00.435) 0:03:08.293 ******* ok: [managed-node1] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "certmonger.service": { "name": "certmonger.service", "source": "systemd", "state": "running", "status": "enabled" }, "chrony-dnssrv@.service": { "name": "chrony-dnssrv@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, 
"cni-dhcp.service": { "name": "cni-dhcp.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "static" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus-org.freedesktop.portable1.service": { "name": "dbus-org.freedesktop.portable1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "running", "status": "static" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dnsmasq.service": { "name": "dnsmasq.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "inactive", 
"status": "disabled" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "unknown" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "halt-local.service": { "name": "halt-local.service", "source": "systemd", "state": "inactive", "status": "static" }, "import-state.service": { "name": "import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "iprdump.service": { "name": "iprdump.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprinit.service": { "name": "iprinit.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprupdate.service": { "name": "iprupdate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "loadmodules.service": { "name": "loadmodules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "messagebus.service": { "name": "messagebus.service", "source": "systemd", "state": "active", "status": "static" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-convert.service": { "name": "nfs-convert.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": 
"static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-halt.service": { "name": "plymouth-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-kexec.service": { "name": "plymouth-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-poweroff.service": { "name": "plymouth-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-quit.service": { "name": "plymouth-quit.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-read-write.service": { "name": "plymouth-read-write.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-reboot.service": { "name": "plymouth-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-switch-root-initramfs.service": { "name": "plymouth-switch-root-initramfs.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-switch-root.service": { "name": "plymouth-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", 
"state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "active", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-plymouth.service": { "name": "systemd-ask-password-plymouth.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": 
"systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-coredump@0.service": { "name": "systemd-coredump@0.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-portabled.service": { "name": "systemd-portabled.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, 
"systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-resolved.service": { "name": "systemd-resolved.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "masked" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "tcsd.service": { "name": "tcsd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "timedatex.service": { "name": "timedatex.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "tuned.service": { "name": "tuned.service", "source": "systemd", "state": "running", "status": "enabled" }, "unbound-anchor.service": { "name": "unbound-anchor.service", "source": "systemd", "state": "stopped", "status": 
"static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "user-runtime-dir@1111.service": { "name": "user-runtime-dir@1111.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "unknown" }, "user@1111.service": { "name": "user@1111.service", "source": "systemd", "state": "running", "status": "unknown" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Wednesday 02 April 2025 12:20:55 -0400 (0:00:01.599) 0:03:09.892 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Wednesday 02 April 2025 12:20:55 -0400 (0:00:00.099) 0:03:09.992 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Network": {} }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Wednesday 02 April 2025 12:20:55 -0400 (0:00:00.051) 0:03:10.044 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_basic" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Wednesday 02 April 2025 12:20:55 -0400 (0:00:00.051) 0:03:10.095 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Wednesday 02 April 2025 12:20:55 -0400 (0:00:00.042) 0:03:10.137 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-basic-unused-network", "__podman_quadlet_type": "network", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Wednesday 02 April 2025 12:20:55 -0400 (0:00:00.055) 0:03:10.193 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:20:55 -0400 (0:00:00.076) 0:03:10.269 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:20:55 -0400 (0:00:00.051) 0:03:10.321 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:20:56 -0400 (0:00:00.047) 0:03:10.369 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_group": "1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:20:56 -0400 (0:00:00.056) 0:03:10.425 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610609.2783134, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610533.3987217, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986653, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "4263604762", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:20:56 -0400 (0:00:00.383) 0:03:10.809 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_basic" ], "delta": "0:00:00.003733", "end": "2025-04-02 12:20:56.782863", "rc": 0, "start": "2025-04-02 12:20:56.779130" } STDOUT: 0: user_quadlet_basic 100000 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:20:56 -0400 (0:00:00.384) 0:03:11.193 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_basic" ], "delta": "0:00:00.003621", "end": "2025-04-02 12:20:57.162916", "rc": 0, "start": "2025-04-02 12:20:57.159295" } STDOUT: 0: user_quadlet_basic 100000 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:20:57 -0400 (0:00:00.444) 0:03:11.638 ******* ok: [managed-node1] => { "ansible_facts": { 
"podman_subgid_info": { "user_quadlet_basic": { "range": 65536, "start": 100000 } }, "podman_subuid_info": { "user_quadlet_basic": { "range": 65536, "start": 100000 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:20:57 -0400 (0:00:00.059) 0:03:11.698 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:20:57 -0400 (0:00:00.043) 0:03:11.741 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:20:57 -0400 (0:00:00.043) 0:03:11.785 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:20:57 -0400 (0:00:00.042) 0:03:11.827 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:20:57 -0400 (0:00:00.052) 0:03:11.880 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Wednesday 02 April 2025 12:20:57 -0400 (0:00:00.043) 0:03:11.924 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-basic-unused-network-network.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_basic", "__podman_xdg_runtime_dir": "/run/user/1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Wednesday 02 April 2025 12:20:57 -0400 (0:00:00.073) 0:03:11.997 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_basic/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Wednesday 02 April 2025 12:20:57 -0400 (0:00:00.044) 0:03:12.041 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set 
per-container variables part 5] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Wednesday 02 April 2025 12:20:57 -0400 (0:00:00.041) 0:03:12.083 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-network.network", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Wednesday 02 April 2025 12:20:57 -0400 (0:00:00.094) 0:03:12.178 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Wednesday 02 April 2025 12:20:57 -0400 (0:00:00.047) 0:03:12.225 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Wednesday 02 April 2025 12:20:57 -0400 (0:00:00.091) 0:03:12.316 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610762.2496548, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1743610822.6816459, "dev": 36, "device_type": 0, "executable": true, "exists": true, "gid": 1111, "gr_name": "user_quadlet_basic", "inode": 28283, "isblk": false, "ischr": false, "isdir": true, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/directory", "mode": "0700", "mtime": 1743610822.6816459, "nlink": 8, "path": "/run/user/1111", "pw_name": "user_quadlet_basic", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 180, "uid": 1111, "version": null, "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": true } } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Wednesday 02 April 2025 12:20:58 -0400 (0:00:00.448) 0:03:12.765 ******* changed: [managed-node1] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-basic-unused-network-network.service", "state": "stopped", "status": { "ActiveEnterTimestamp": "Wed 2025-04-02 12:19:38 EDT", "ActiveEnterTimestampMonotonic": "48462650", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "run-user-1111.mount basic.target -.mount -.slice", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "yes", "AssertTimestamp": "Wed 2025-04-02 12:19:38 EDT", "AssertTimestampMonotonic": "48386798", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", 
"CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Wed 2025-04-02 12:19:38 EDT", "ConditionTimestampMonotonic": "48386797", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/user.slice/user-1111.slice/user@1111.service/quadlet-basic-unused-network-network.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "quadlet-basic-unused-network-network.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "ExecMainCode": "1", "ExecMainExitTimestamp": "Wed 2025-04-02 12:19:38 EDT", "ExecMainExitTimestampMonotonic": "48462437", "ExecMainPID": "5032", "ExecMainStartTimestamp": "Wed 2025-04-02 12:19:38 EDT", "ExecMainStartTimestampMonotonic": "48387458", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore systemd-quadlet-basic-unused-network ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/run/user/1111/systemd/generator/quadlet-basic-unused-network-network.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "quadlet-basic-unused-network-network.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Wed 2025-04-02 12:19:38 EDT", "InactiveExitTimestampMonotonic": "48387490", "InvocationID": "67526387fd92472583a97afbbd0afd41", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "inherit", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": 
"infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "MemoryAccounting": "yes", "MemoryCurrent": "0", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "quadlet-basic-unused-network-network.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "-.slice basic.target", "RequiresMountsFor": "/run/user/1111/containers /home/user_quadlet_basic", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "-.slice", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Wed 2025-04-02 12:19:38 EDT", "StateChangeTimestampMonotonic": "48462650", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "exited", "SuccessAction": "none", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-basic-unused-network-network", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "0", "TasksMax": "22405", "TimeoutStartUSec": "infinity", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "generated", "UtmpMode": "init", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0", "WorkingDirectory": "!/home/user_quadlet_basic" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Wednesday 02 April 2025 12:20:58 -0400 (0:00:00.569) 0:03:13.334 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610777.756646, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", 
"checksum": "52c9d75ecaf81203cc1f1a3b1dd00fcd25067b01", "ctime": 1743610777.2306461, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 1111, "gr_name": "user_quadlet_basic", "inode": 234881218, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1743610776.9566462, "nlink": 1, "path": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-network.network", "pw_name": "user_quadlet_basic", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 54, "uid": 1111, "version": "2503925155", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Wednesday 02 April 2025 12:20:59 -0400 (0:00:00.404) 0:03:13.738 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Wednesday 02 April 2025 12:20:59 -0400 (0:00:00.075) 0:03:13.814 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Wednesday 02 April 2025 12:20:59 -0400 (0:00:00.390) 0:03:14.204 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Wednesday 02 April 2025 12:20:59 -0400 (0:00:00.065) 0:03:14.270 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Wednesday 02 April 2025 12:20:59 -0400 (0:00:00.058) 0:03:14.328 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Wednesday 02 April 2025 12:21:00 -0400 (0:00:00.049) 0:03:14.378 ******* changed: [managed-node1] => { "changed": true, "path": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-network.network", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Wednesday 02 April 
2025 12:21:00 -0400 (0:00:00.437) 0:03:14.815 ******* ok: [managed-node1] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Wednesday 02 April 2025 12:21:01 -0400 (0:00:00.554) 0:03:15.370 ******* changed: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Wednesday 02 April 2025 12:21:01 -0400 (0:00:00.527) 0:03:15.898 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Wednesday 02 April 2025 12:21:01 -0400 (0:00:00.067) 0:03:15.966 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Wednesday 02 April 2025 12:21:01 -0400 (0:00:00.041) 0:03:16.007 ******* changed: [managed-node1] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.045218", "end": "2025-04-02 12:21:02.041374", "rc": 0, "start": "2025-04-02 12:21:01.996156" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Wednesday 02 April 2025 12:21:02 -0400 (0:00:00.474) 0:03:16.482 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Wednesday 02 April 2025 12:21:02 -0400 (0:00:00.091) 0:03:16.574 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Wednesday 02 April 2025 12:21:02 -0400 (0:00:00.045) 0:03:16.619 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Wednesday 02 April 2025 12:21:02 -0400 (0:00:00.043) 0:03:16.663 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_cancel_user_linger": [ "user_quadlet_basic" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Wednesday 02 April 2025 12:21:02 -0400 (0:00:00.049) 0:03:16.712 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.043362", "end": "2025-04-02 12:21:02.741422", "rc": 0, "start": "2025-04-02 12:21:02.698060" } TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Wednesday 02 April 2025 12:21:02 -0400 (0:00:00.459) 0:03:17.171 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.043908", "end": "2025-04-02 12:21:03.216332", "rc": 0, "start": "2025-04-02 12:21:03.172424" } TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Wednesday 02 April 2025 12:21:03 -0400 (0:00:00.466) 0:03:17.638 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.041111", "end": "2025-04-02 12:21:03.696263", "rc": 0, "start": "2025-04-02 12:21:03.655152" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Wednesday 02 April 2025 12:21:03 -0400 (0:00:00.499) 0:03:18.138 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.053211", "end": "2025-04-02 12:21:04.186504", "rc": 0, "start": "2025-04-02 12:21:04.133293" } STDOUT: podman quadlet-basic-name TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Wednesday 02 April 2025 12:21:04 -0400 (0:00:00.467) 0:03:18.605 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Wednesday 02 April 2025 12:21:04 -0400 (0:00:00.445) 0:03:19.051 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Wednesday 02 April 2025 12:21:05 -0400 (0:00:00.443) 0:03:19.494 ******* ok: [managed-node1] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": 
"systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "certmonger.service": { "name": "certmonger.service", "source": "systemd", "state": "running", "status": "enabled" }, "chrony-dnssrv@.service": { "name": "chrony-dnssrv@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cni-dhcp.service": { "name": "cni-dhcp.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "static" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus-org.freedesktop.portable1.service": { "name": "dbus-org.freedesktop.portable1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "running", "status": "static" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", 
"status": "disabled" }, "dnsmasq.service": { "name": "dnsmasq.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "unknown" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "halt-local.service": { "name": "halt-local.service", "source": "systemd", "state": "inactive", "status": "static" }, "import-state.service": { "name": "import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "iprdump.service": { "name": "iprdump.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprinit.service": { "name": "iprinit.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprupdate.service": { "name": "iprupdate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", 
"state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "loadmodules.service": { "name": "loadmodules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "messagebus.service": { "name": "messagebus.service", "source": "systemd", "state": "active", "status": "static" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-convert.service": { "name": "nfs-convert.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-halt.service": { "name": "plymouth-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-kexec.service": { "name": "plymouth-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-poweroff.service": { "name": "plymouth-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-quit.service": { "name": "plymouth-quit.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-read-write.service": { "name": "plymouth-read-write.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-reboot.service": { "name": "plymouth-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-switch-root-initramfs.service": { "name": "plymouth-switch-root-initramfs.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-switch-root.service": { "name": "plymouth-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": 
"sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "active", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-plymouth.service": { "name": "systemd-ask-password-plymouth.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-coredump@0.service": { "name": "systemd-coredump@0.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": 
"systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-portabled.service": { "name": "systemd-portabled.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-resolved.service": { "name": "systemd-resolved.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "masked" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": 
"static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "tcsd.service": { "name": "tcsd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "timedatex.service": { "name": "timedatex.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "tuned.service": { "name": "tuned.service", "source": "systemd", "state": "running", "status": "enabled" }, "unbound-anchor.service": { "name": "unbound-anchor.service", "source": "systemd", "state": "stopped", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "user-runtime-dir@1111.service": { "name": "user-runtime-dir@1111.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "unknown" }, "user@1111.service": { "name": "user@1111.service", "source": "systemd", "state": "running", "status": "unknown" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Wednesday 02 April 2025 12:21:06 -0400 (0:00:01.646) 0:03:21.140 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Wednesday 02 April 2025 12:21:06 -0400 (0:00:00.040) 0:03:21.181 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Network]\nSubnet=192.168.29.0/24\nGateway=192.168.29.1\nLabel=app=wordpress\nNetworkName=quadlet-basic-name\n", "__podman_quadlet_template_src": "templates/quadlet-basic.network.j2" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Wednesday 02 April 2025 12:21:06 -0400 (0:00:00.126) 0:03:21.308 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, 
"__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_basic" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Wednesday 02 April 2025 12:21:07 -0400 (0:00:00.048) 0:03:21.356 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Wednesday 02 April 2025 12:21:07 -0400 (0:00:00.043) 0:03:21.400 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-basic", "__podman_quadlet_type": "network", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Wednesday 02 April 2025 12:21:07 -0400 (0:00:00.057) 0:03:21.457 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:21:07 -0400 (0:00:00.072) 0:03:21.530 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:21:07 -0400 (0:00:00.049) 0:03:21.580 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:21:07 -0400 (0:00:00.048) 0:03:21.628 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_group": "1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:21:07 -0400 (0:00:00.052) 0:03:21.681 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610609.2783134, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610533.3987217, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986653, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "4263604762", "wgrp": 
false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:21:07 -0400 (0:00:00.429) 0:03:22.110 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_basic" ], "delta": "0:00:00.003376", "end": "2025-04-02 12:21:08.091339", "rc": 0, "start": "2025-04-02 12:21:08.087963" } STDOUT: 0: user_quadlet_basic 100000 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:21:08 -0400 (0:00:00.404) 0:03:22.515 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_basic" ], "delta": "0:00:00.003392", "end": "2025-04-02 12:21:08.602105", "rc": 0, "start": "2025-04-02 12:21:08.598713" } STDOUT: 0: user_quadlet_basic 100000 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:21:08 -0400 (0:00:00.520) 0:03:23.035 ******* ok: [managed-node1] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_basic": { "range": 65536, "start": 100000 } }, "podman_subuid_info": { "user_quadlet_basic": { "range": 65536, "start": 100000 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:21:08 -0400 (0:00:00.081) 0:03:23.116 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:21:08 -0400 (0:00:00.074) 0:03:23.191 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:21:08 -0400 (0:00:00.067) 0:03:23.259 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:21:08 -0400 (0:00:00.053) 0:03:23.312 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:21:09 -0400 (0:00:00.055) 0:03:23.367 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Wednesday 02 April 2025 12:21:09 -0400 (0:00:00.044) 0:03:23.411 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-basic-network.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_basic", "__podman_xdg_runtime_dir": "/run/user/1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Wednesday 02 April 2025 12:21:09 -0400 (0:00:00.069) 0:03:23.481 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_basic/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Wednesday 02 April 2025 12:21:09 -0400 (0:00:00.046) 0:03:23.527 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Wednesday 02 April 2025 12:21:09 -0400 (0:00:00.053) 0:03:23.581 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic.network", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Wednesday 02 April 2025 12:21:09 -0400 (0:00:00.139) 0:03:23.720 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Wednesday 02 April 2025 12:21:09 -0400 (0:00:00.070) 0:03:23.791 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Wednesday 02 April 2025 12:21:09 -0400 (0:00:00.117) 0:03:23.908 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610762.2496548, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1743610822.6816459, "dev": 36, "device_type": 0, "executable": true, "exists": true, "gid": 1111, "gr_name": "user_quadlet_basic", "inode": 28283, "isblk": false, "ischr": false, "isdir": true, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/directory", 
"mode": "0700", "mtime": 1743610822.6816459, "nlink": 8, "path": "/run/user/1111", "pw_name": "user_quadlet_basic", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 180, "uid": 1111, "version": null, "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": true } } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Wednesday 02 April 2025 12:21:10 -0400 (0:00:00.498) 0:03:24.406 ******* changed: [managed-node1] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-basic-network.service", "state": "stopped", "status": { "ActiveEnterTimestamp": "Wed 2025-04-02 12:19:32 EDT", "ActiveEnterTimestampMonotonic": "42564968", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "run-user-1111.mount -.slice basic.target -.mount", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "yes", "AssertTimestamp": "Wed 2025-04-02 12:19:32 EDT", "AssertTimestampMonotonic": "42525997", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Wed 2025-04-02 12:19:32 EDT", "ConditionTimestampMonotonic": "42525995", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/user.slice/user-1111.slice/user@1111.service/quadlet-basic-network.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "quadlet-basic-network.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "ExecMainCode": "1", "ExecMainExitTimestamp": "Wed 2025-04-02 12:19:32 EDT", "ExecMainExitTimestampMonotonic": "42564773", "ExecMainPID": "3913", "ExecMainStartTimestamp": "Wed 2025-04-02 12:19:32 EDT", "ExecMainStartTimestampMonotonic": "42526686", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet=192.168.29.0/24 --gateway=192.168.29.1 --label app=wordpress quadlet-basic-name ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": 
"/run/user/1111/systemd/generator/quadlet-basic-network.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "quadlet-basic-network.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Wed 2025-04-02 12:19:32 EDT", "InactiveExitTimestampMonotonic": "42526717", "InvocationID": "ea65e34c831a437397061d754e67fb54", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "inherit", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "MemoryAccounting": "yes", "MemoryCurrent": "0", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "quadlet-basic-network.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "-.slice basic.target", "RequiresMountsFor": "/run/user/1111/containers /home/user_quadlet_basic", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "-.slice", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not 
set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Wed 2025-04-02 12:19:32 EDT", "StateChangeTimestampMonotonic": "42564968", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "exited", "SuccessAction": "none", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-basic-network", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "0", "TasksMax": "22405", "TimeoutStartUSec": "infinity", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "generated", "UtmpMode": "init", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0", "WorkingDirectory": "!/home/user_quadlet_basic" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Wednesday 02 April 2025 12:21:10 -0400 (0:00:00.662) 0:03:25.069 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610771.9236493, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "19c9b17be2af9b9deca5c3bd327f048966750682", "ctime": 1743610771.2676497, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 1111, "gr_name": "user_quadlet_basic", "inode": 195035332, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1743610770.9736497, "nlink": 1, "path": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic.network", "pw_name": "user_quadlet_basic", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 105, "uid": 1111, "version": "1178211533", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Wednesday 02 April 2025 12:21:11 -0400 (0:00:00.448) 0:03:25.518 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Wednesday 02 April 2025 12:21:11 -0400 (0:00:00.118) 0:03:25.637 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Wednesday 02 April 2025 12:21:11 -0400 (0:00:00.394) 0:03:26.031 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet 
yaml file] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Wednesday 02 April 2025 12:21:11 -0400 (0:00:00.057) 0:03:26.089 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Wednesday 02 April 2025 12:21:11 -0400 (0:00:00.056) 0:03:26.146 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Wednesday 02 April 2025 12:21:11 -0400 (0:00:00.060) 0:03:26.207 ******* changed: [managed-node1] => { "changed": true, "path": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic.network", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Wednesday 02 April 2025 12:21:12 -0400 (0:00:00.408) 0:03:26.615 ******* ok: [managed-node1] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Wednesday 02 April 2025 12:21:12 -0400 (0:00:00.570) 0:03:27.186 ******* changed: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Wednesday 02 April 2025 12:21:13 -0400 (0:00:00.465) 0:03:27.652 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Wednesday 02 April 2025 12:21:13 -0400 (0:00:00.059) 0:03:27.711 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Wednesday 02 April 2025 12:21:13 -0400 (0:00:00.040) 0:03:27.751 ******* changed: [managed-node1] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.042760", "end": "2025-04-02 12:21:13.780881", "rc": 0, "start": "2025-04-02 12:21:13.738121" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Wednesday 02 April 2025 12:21:13 -0400 (0:00:00.452) 0:03:28.203 
******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Wednesday 02 April 2025 12:21:13 -0400 (0:00:00.073) 0:03:28.277 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Wednesday 02 April 2025 12:21:13 -0400 (0:00:00.042) 0:03:28.319 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Wednesday 02 April 2025 12:21:14 -0400 (0:00:00.044) 0:03:28.363 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_cancel_user_linger": [ "user_quadlet_basic" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Wednesday 02 April 2025 12:21:14 -0400 (0:00:00.048) 0:03:28.412 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.042781", "end": "2025-04-02 12:21:14.444432", "rc": 0, "start": "2025-04-02 12:21:14.401651" } TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Wednesday 02 April 2025 12:21:14 -0400 (0:00:00.448) 0:03:28.860 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.041856", "end": "2025-04-02 12:21:14.972403", "rc": 0, "start": "2025-04-02 12:21:14.930547" } TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Wednesday 02 April 2025 12:21:15 -0400 (0:00:00.526) 0:03:29.386 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.040942", "end": "2025-04-02 12:21:15.429268", "rc": 0, "start": "2025-04-02 12:21:15.388326" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Wednesday 02 April 2025 12:21:15 -0400 (0:00:00.466) 0:03:29.853 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.041162", "end": "2025-04-02 12:21:15.884326", "rc": 0, "start": "2025-04-02 12:21:15.843164" } STDOUT: podman TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Wednesday 02 April 2025 12:21:15 -0400 (0:00:00.467) 0:03:30.320 ******* ok: [managed-node1] => { "censored": "the output 
has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Wednesday 02 April 2025 12:21:16 -0400 (0:00:00.462) 0:03:30.783 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Wednesday 02 April 2025 12:21:16 -0400 (0:00:00.504) 0:03:31.288 ******* ok: [managed-node1] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "certmonger.service": { "name": "certmonger.service", "source": "systemd", "state": "running", "status": "enabled" }, "chrony-dnssrv@.service": { "name": "chrony-dnssrv@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cni-dhcp.service": { "name": "cni-dhcp.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.locale1.service": { 
"name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "static" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus-org.freedesktop.portable1.service": { "name": "dbus-org.freedesktop.portable1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "running", "status": "static" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dnsmasq.service": { "name": "dnsmasq.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "unknown" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": 
"systemd", "state": "running", "status": "disabled" }, "halt-local.service": { "name": "halt-local.service", "source": "systemd", "state": "inactive", "status": "static" }, "import-state.service": { "name": "import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "iprdump.service": { "name": "iprdump.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprinit.service": { "name": "iprinit.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprupdate.service": { "name": "iprupdate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "loadmodules.service": { "name": "loadmodules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "messagebus.service": { "name": "messagebus.service", "source": "systemd", "state": "active", "status": "static" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-convert.service": { "name": "nfs-convert.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": 
"inactive", "status": "disabled" }, "plymouth-halt.service": { "name": "plymouth-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-kexec.service": { "name": "plymouth-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-poweroff.service": { "name": "plymouth-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-quit.service": { "name": "plymouth-quit.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-read-write.service": { "name": "plymouth-read-write.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-reboot.service": { "name": "plymouth-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-switch-root-initramfs.service": { "name": "plymouth-switch-root-initramfs.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-switch-root.service": { "name": "plymouth-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": 
"selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "active", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-plymouth.service": { "name": "systemd-ask-password-plymouth.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-coredump@0.service": { "name": "systemd-coredump@0.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": 
"systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-portabled.service": { "name": "systemd-portabled.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-resolved.service": { "name": "systemd-resolved.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": 
"static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "masked" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "tcsd.service": { "name": "tcsd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "timedatex.service": { "name": "timedatex.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "tuned.service": { "name": "tuned.service", "source": "systemd", "state": "running", "status": "enabled" }, "unbound-anchor.service": { "name": "unbound-anchor.service", "source": "systemd", "state": "stopped", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "user-runtime-dir@1111.service": { "name": "user-runtime-dir@1111.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "unknown" }, "user@1111.service": { "name": "user@1111.service", "source": "systemd", "state": "running", "status": "unknown" } } }, "changed": false } TASK 
[fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Wednesday 02 April 2025 12:21:18 -0400 (0:00:01.617) 0:03:32.905 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:198 Wednesday 02 April 2025 12:21:18 -0400 (0:00:00.140) 0:03:33.046 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:4 Wednesday 02 April 2025 12:21:18 -0400 (0:00:00.191) 0:03:33.238 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set cancel linger vars] *************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:11 Wednesday 02 April 2025 12:21:18 -0400 (0:00:00.077) 0:03:33.315 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_xdg_runtime_dir": "/run/user/1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:16 Wednesday 02 April 2025 12:21:19 -0400 (0:00:00.096) 0:03:33.412 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610762.2496548, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1743610822.6816459, "dev": 36, "device_type": 0, "executable": true, "exists": true, "gid": 1111, "gr_name": "user_quadlet_basic", "inode": 28283, "isblk": false, "ischr": false, "isdir": true, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/directory", "mode": "0700", "mtime": 1743610822.6816459, "nlink": 8, "path": "/run/user/1111", "pw_name": "user_quadlet_basic", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 180, "uid": 1111, "version": null, "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": true } } TASK [fedora.linux_system_roles.podman : Gather facts for containers] ********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:21 Wednesday 02 April 2025 12:21:19 -0400 (0:00:00.433) 0:03:33.845 ******* ok: [managed-node1] => { "changed": false, "containers": [] } TASK [fedora.linux_system_roles.podman : Gather facts for networks] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:30 Wednesday 02 April 2025 12:21:20 -0400 (0:00:00.689) 0:03:34.535 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "network", "ls", "-q" ], "delta": "0:00:00.042038", "end": "2025-04-02 12:21:20.553469", "rc": 0, "start": "2025-04-02 12:21:20.511431" } STDOUT: podman TASK [fedora.linux_system_roles.podman : Gather secrets] *********************** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:40 Wednesday 02 April 2025 12:21:20 -0400 (0:00:00.454) 0:03:34.990 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "secret", "ls", "-n", "-q" ], "delta": "0:00:00.041463", "end": "2025-04-02 12:21:21.049383", "rc": 0, "start": "2025-04-02 12:21:21.007920" } TASK [fedora.linux_system_roles.podman : Cancel linger if no more resources are in use] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:50 Wednesday 02 April 2025 12:21:21 -0400 (0:00:00.497) 0:03:35.487 ******* changed: [managed-node1] => { "changed": true, "cmd": [ "loginctl", "disable-linger", "user_quadlet_basic" ], "delta": "0:00:00.007543", "end": "2025-04-02 12:21:21.502674", "rc": 0, "start": "2025-04-02 12:21:21.495131" } TASK [fedora.linux_system_roles.podman : Wait for user session to exit closing state] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:62 Wednesday 02 April 2025 12:21:21 -0400 (0:00:00.460) 0:03:35.947 ******* ok: [managed-node1] => { "attempts": 1, "changed": false, "cmd": [ "loginctl", "show-user", "--value", "-p", "State", "user_quadlet_basic" ], "delta": "0:00:00.006701", "end": "2025-04-02 12:21:21.941740", "failed_when_result": false, "rc": 1, "start": "2025-04-02 12:21:21.935039" } STDERR: Failed to get user: User ID 1111 is not logged in or lingering MSG: non-zero return code TASK [fedora.linux_system_roles.podman : Stop logind] ************************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:82 Wednesday 02 April 2025 12:21:22 -0400 (0:00:00.410) 0:03:36.358 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Wait for user session to exit closing state] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:87 Wednesday 02 April 2025 12:21:22 -0400 (0:00:00.050) 0:03:36.409 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Restart logind] *********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:98 Wednesday 02 April 2025 12:21:22 -0400 (0:00:00.042) 0:03:36.452 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:204 Wednesday 02 April 2025 12:21:22 -0400 (0:00:00.052) 0:03:36.504 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:213 Wednesday 02 April 2025 12:21:22 -0400 (0:00:00.061) 0:03:36.566 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Ensure no resources] 
***************************************************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:230 Wednesday 02 April 2025 12:21:22 -0400 (0:00:00.066) 0:03:36.632 ******* ok: [managed-node1] => { "changed": false } MSG: All assertions passed TASK [Ensure no linger] ******************************************************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:240 Wednesday 02 April 2025 12:21:22 -0400 (0:00:00.099) 0:03:36.732 ******* ok: [managed-node1] => { "changed": false, "failed_when_result": false, "stat": { "exists": false } } TASK [Run the role - root] ***************************************************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:246 Wednesday 02 April 2025 12:21:22 -0400 (0:00:00.428) 0:03:37.161 ******* TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Wednesday 02 April 2025 12:21:23 -0400 (0:00:00.196) 0:03:37.357 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Wednesday 02 April 2025 12:21:23 -0400 (0:00:00.125) 0:03:37.483 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Wednesday 02 April 2025 12:21:23 -0400 (0:00:00.078) 0:03:37.562 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Wednesday 02 April 2025 12:21:23 -0400 (0:00:00.158) 0:03:37.720 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Wednesday 02 April 2025 12:21:23 -0400 (0:00:00.065) 0:03:37.786 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Wednesday 02 April 2025 12:21:23 -0400 (0:00:00.052) 0:03:37.838 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Wednesday 02 April 2025 12:21:23 -0400 (0:00:00.050) 0:03:37.889 ******* ok: [managed-node1] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": 
[ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node1] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node1] => (item=CentOS_8.yml) => { "ansible_facts": { "__podman_packages": [ "crun", "podman", "podman-plugins", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } ok: [managed-node1] => (item=CentOS_8.yml) => { "ansible_facts": { "__podman_packages": [ "crun", "podman", "podman-plugins", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Wednesday 02 April 2025 12:21:23 -0400 (0:00:00.117) 0:03:38.006 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Wednesday 02 April 2025 12:21:25 -0400 (0:00:01.544) 0:03:39.550 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Wednesday 02 April 2025 12:21:25 -0400 (0:00:00.071) 0:03:39.621 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Wednesday 02 April 2025 12:21:25 -0400 (0:00:00.082) 0:03:39.703 ******* skipping: [managed-node1] => {} TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Wednesday 02 April 2025 12:21:25 -0400 (0:00:00.063) 0:03:39.767 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Wednesday 02 April 2025 12:21:25 -0400 (0:00:00.137) 0:03:39.905 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Wednesday 02 April 2025 12:21:25 -0400 (0:00:00.066) 0:03:39.971 
******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.028856", "end": "2025-04-02 12:21:25.995005", "rc": 0, "start": "2025-04-02 12:21:25.966149" } STDOUT: podman version 4.9.4-dev TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Wednesday 02 April 2025 12:21:26 -0400 (0:00:00.466) 0:03:40.437 ******* ok: [managed-node1] => { "ansible_facts": { "podman_version": "4.9.4-dev" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Wednesday 02 April 2025 12:21:26 -0400 (0:00:00.068) 0:03:40.506 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Wednesday 02 April 2025 12:21:26 -0400 (0:00:00.081) 0:03:40.587 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } META: end_host conditional evaluated to false, continuing execution for managed-node1 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Wednesday 02 April 2025 12:21:26 -0400 (0:00:00.190) 0:03:40.778 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } META: end_host conditional evaluated to false, continuing execution for managed-node1 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Wednesday 02 April 2025 12:21:26 -0400 (0:00:00.125) 0:03:40.903 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:21:26 -0400 (0:00:00.080) 0:03:40.983 ******* ok: [managed-node1] => { "ansible_facts": { "getent_passwd": { "root": [ "x", "0", "0", "root", "/root", "/bin/bash" ] } }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:21:27 -0400 (0:00:00.389) 0:03:41.373 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:21:27 -0400 (0:00:00.049) 0:03:41.422 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:21:27 -0400 (0:00:00.121) 0:03:41.544 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610609.2783134, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610533.3987217, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986653, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "4263604762", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:21:27 -0400 (0:00:00.414) 0:03:41.958 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:21:27 -0400 (0:00:00.072) 0:03:42.031 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:21:27 -0400 (0:00:00.069) 0:03:42.100 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:21:27 -0400 (0:00:00.060) 0:03:42.161 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:21:27 -0400 (0:00:00.048) 0:03:42.210 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:21:27 -0400 (0:00:00.054) 0:03:42.265 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:21:27 -0400 
(0:00:00.050) 0:03:42.316 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:21:28 -0400 (0:00:00.047) 0:03:42.363 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Wednesday 02 April 2025 12:21:28 -0400 (0:00:00.042) 0:03:42.405 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_parent_mode": "0755", "__podman_parent_path": "/etc/containers", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:126 Wednesday 02 April 2025 12:21:28 -0400 (0:00:00.053) 0:03:42.459 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Wednesday 02 April 2025 12:21:28 -0400 (0:00:00.073) 0:03:42.532 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Wednesday 02 April 2025 12:21:28 -0400 (0:00:00.039) 0:03:42.572 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:129 Wednesday 02 April 2025 12:21:28 -0400 (0:00:00.051) 0:03:42.623 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Wednesday 02 April 2025 12:21:28 -0400 (0:00:00.145) 0:03:42.769 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Wednesday 02 April 2025 12:21:28 -0400 (0:00:00.064) 0:03:42.834 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:132 Wednesday 02 April 2025 12:21:28 -0400 (0:00:00.049) 0:03:42.883 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:7 Wednesday 02 April 2025 12:21:28 -0400 (0:00:00.098) 0:03:42.982 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:15 Wednesday 02 April 2025 12:21:28 -0400 (0:00:00.045) 0:03:43.027 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:135 Wednesday 02 April 2025 12:21:28 -0400 (0:00:00.041) 0:03:43.069 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:8 Wednesday 02 April 2025 12:21:28 -0400 (0:00:00.077) 0:03:43.147 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:16 Wednesday 02 April 2025 12:21:28 -0400 (0:00:00.040) 0:03:43.187 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:21 Wednesday 02 April 2025 12:21:28 -0400 (0:00:00.039) 0:03:43.227 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:27 Wednesday 02 April 2025 12:21:28 -0400 (0:00:00.040) 0:03:43.267 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:141 Wednesday 02 April 2025 12:21:28 -0400 (0:00:00.040) 0:03:43.308 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Manage selinux for specified ports] ************************************** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:148 Wednesday 02 April 2025 12:21:29 -0400 (0:00:00.039) 0:03:43.348 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:155 Wednesday 02 April 2025 12:21:29 -0400 (0:00:00.046) 0:03:43.394 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:159 Wednesday 02 April 2025 12:21:29 -0400 (0:00:00.144) 0:03:43.538 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:168 Wednesday 02 April 2025 12:21:29 -0400 (0:00:00.064) 0:03:43.603 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:177 Wednesday 02 April 2025 12:21:29 -0400 (0:00:00.067) 0:03:43.671 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node1 included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Wednesday 02 April 2025 12:21:29 -0400 (0:00:00.204) 0:03:43.875 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Wednesday 02 April 2025 12:21:29 -0400 (0:00:00.066) 0:03:43.942 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:21:29 -0400 (0:00:00.119) 0:03:44.061 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:21:29 -0400 (0:00:00.078) 0:03:44.140 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:21:29 -0400 (0:00:00.073) 0:03:44.213 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:21:29 -0400 (0:00:00.067) 0:03:44.281 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:21:29 -0400 (0:00:00.049) 0:03:44.331 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:21:30 -0400 (0:00:00.047) 0:03:44.379 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:21:30 -0400 (0:00:00.041) 0:03:44.420 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:21:30 -0400 (0:00:00.040) 0:03:44.460 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:21:30 -0400 (0:00:00.041) 0:03:44.502 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:21:30 -0400 (0:00:00.134) 0:03:44.637 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:21:30 -0400 (0:00:00.064) 0:03:44.701 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 
Wednesday 02 April 2025 12:21:30 -0400 (0:00:00.079) 0:03:44.780 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14 Wednesday 02 April 2025 12:21:30 -0400 (0:00:00.068) 0:03:44.849 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_rootless": false, "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20 Wednesday 02 April 2025 12:21:30 -0400 (0:00:00.085) 0:03:44.934 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Wednesday 02 April 2025 12:21:30 -0400 (0:00:00.121) 0:03:45.056 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Wednesday 02 April 2025 12:21:30 -0400 (0:00:00.067) 0:03:45.124 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Wednesday 02 April 2025 12:21:30 -0400 (0:00:00.063) 0:03:45.187 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25 Wednesday 02 April 2025 12:21:30 -0400 (0:00:00.071) 0:03:45.259 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41 Wednesday 02 April 2025 12:21:30 -0400 (0:00:00.069) 0:03:45.329 ******* [WARNING]: Using a variable for a task's 'args' is unsafe in some situations (see https://docs.ansible.com/ansible/devel/reference_appendices/faq.html#argsplat- unsafe) changed: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Wednesday 02 April 2025 12:21:31 -0400 (0:00:00.652) 0:03:45.981 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Wednesday 02 April 2025 12:21:31 -0400 (0:00:00.050) 0:03:46.032 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:21:31 -0400 (0:00:00.086) 0:03:46.118 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:21:31 -0400 (0:00:00.147) 0:03:46.266 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:21:31 -0400 (0:00:00.070) 0:03:46.336 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:21:32 -0400 (0:00:00.077) 0:03:46.414 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:21:32 -0400 (0:00:00.049) 0:03:46.463 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:21:32 -0400 (0:00:00.050) 0:03:46.514 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:21:32 -0400 (0:00:00.047) 0:03:46.561 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:21:32 -0400 (0:00:00.040) 0:03:46.602 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:21:32 -0400 (0:00:00.042) 
0:03:46.645 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:21:32 -0400 (0:00:00.042) 0:03:46.688 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:21:32 -0400 (0:00:00.048) 0:03:46.736 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:21:32 -0400 (0:00:00.053) 0:03:46.790 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14 Wednesday 02 April 2025 12:21:32 -0400 (0:00:00.065) 0:03:46.855 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_rootless": false, "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20 Wednesday 02 April 2025 12:21:32 -0400 (0:00:00.059) 0:03:46.915 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Wednesday 02 April 2025 12:21:32 -0400 (0:00:00.075) 0:03:46.990 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Wednesday 02 April 2025 12:21:32 -0400 (0:00:00.041) 0:03:47.031 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Wednesday 02 April 2025 12:21:32 -0400 (0:00:00.129) 0:03:47.161 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25 Wednesday 02 April 2025 12:21:32 -0400 (0:00:00.074) 0:03:47.235 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41 Wednesday 02 April 2025 12:21:32 -0400 (0:00:00.068) 0:03:47.304 ******* [WARNING]: Using a variable for a task's 'args' is unsafe in some situations (see https://docs.ansible.com/ansible/devel/reference_appendices/faq.html#argsplat- unsafe) changed: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:184 Wednesday 02 April 2025 12:21:33 -0400 (0:00:00.555) 0:03:47.859 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:191 Wednesday 02 April 2025 12:21:33 -0400 (0:00:00.039) 0:03:47.898 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Wednesday 02 April 2025 12:21:33 -0400 (0:00:00.193) 0:03:48.091 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Network]\nSubnet=192.168.29.0/24\nGateway=192.168.29.1\nLabel=app=wordpress\nNetworkName=quadlet-basic-name\n", "__podman_quadlet_template_src": "templates/quadlet-basic.network.j2" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Wednesday 02 April 2025 12:21:33 -0400 (0:00:00.154) 0:03:48.245 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Wednesday 02 April 2025 12:21:33 -0400 (0:00:00.052) 0:03:48.298 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Wednesday 02 April 2025 12:21:34 -0400 (0:00:00.045) 0:03:48.344 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-basic", "__podman_quadlet_type": "network", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Wednesday 02 April 2025 12:21:34 -0400 (0:00:00.060) 0:03:48.404 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:21:34 -0400 (0:00:00.076) 0:03:48.480 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:21:34 -0400 (0:00:00.177) 0:03:48.658 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:21:34 -0400 (0:00:00.082) 0:03:48.740 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:21:34 -0400 (0:00:00.089) 0:03:48.830 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610609.2783134, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610533.3987217, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986653, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "4263604762", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:21:34 -0400 (0:00:00.464) 0:03:49.295 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:21:35 -0400 (0:00:00.059) 0:03:49.355 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:21:35 -0400 (0:00:00.055) 0:03:49.410 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:21:35 -0400 (0:00:00.056) 0:03:49.467 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:21:35 -0400 (0:00:00.062) 0:03:49.529 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:21:35 -0400 (0:00:00.063) 0:03:49.593 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:21:35 -0400 (0:00:00.046) 0:03:49.640 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:21:35 -0400 (0:00:00.056) 0:03:49.696 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Wednesday 02 April 2025 12:21:35 -0400 (0:00:00.068) 0:03:49.765 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-basic-network.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Wednesday 02 April 2025 12:21:35 -0400 (0:00:00.117) 0:03:49.882 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK 
[fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Wednesday 02 April 2025 12:21:35 -0400 (0:00:00.083) 0:03:49.966 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Wednesday 02 April 2025 12:21:35 -0400 (0:00:00.064) 0:03:50.031 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-basic.network", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Wednesday 02 April 2025 12:21:35 -0400 (0:00:00.229) 0:03:50.260 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Wednesday 02 April 2025 12:21:35 -0400 (0:00:00.074) 0:03:50.335 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Wednesday 02 April 2025 12:21:36 -0400 (0:00:00.068) 0:03:50.403 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Wednesday 02 April 2025 12:21:36 -0400 (0:00:00.144) 0:03:50.548 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Wednesday 02 April 2025 12:21:36 -0400 (0:00:00.116) 0:03:50.664 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Wednesday 02 April 2025 12:21:36 -0400 (0:00:00.068) 0:03:50.733 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Wednesday 02 April 2025 12:21:36 -0400 (0:00:00.066) 0:03:50.799 ******* skipping: [managed-node1] => { "changed": false, 
"skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Wednesday 02 April 2025 12:21:36 -0400 (0:00:00.067) 0:03:50.866 ******* TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Wednesday 02 April 2025 12:21:36 -0400 (0:00:00.086) 0:03:50.953 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Wednesday 02 April 2025 12:21:36 -0400 (0:00:00.068) 0:03:51.021 ******* ok: [managed-node1] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Wednesday 02 April 2025 12:21:37 -0400 (0:00:00.430) 0:03:51.452 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Wednesday 02 April 2025 12:21:37 -0400 (0:00:00.043) 0:03:51.495 ******* changed: [managed-node1] => { "changed": true, "checksum": "19c9b17be2af9b9deca5c3bd327f048966750682", "dest": "/etc/containers/systemd/quadlet-basic.network", "gid": 0, "group": "root", "md5sum": "313e9a2e5a99f80fa7023c19a1065658", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 105, "src": "/root/.ansible/tmp/ansible-tmp-1743610897.2148337-21372-133279900083913/source", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Wednesday 02 April 2025 12:21:37 -0400 (0:00:00.734) 0:03:52.230 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Wednesday 02 April 2025 12:21:37 -0400 (0:00:00.042) 0:03:52.272 ******* ok: [managed-node1] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Wednesday 02 April 2025 12:21:38 -0400 (0:00:00.621) 0:03:52.894 ******* changed: [managed-node1] => { "changed": true, "name": "quadlet-basic-network.service", "state": "started", "status": { 
"ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "system.slice -.mount sysinit.target systemd-journald.socket basic.target", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "quadlet-basic-network.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet=192.168.29.0/24 --gateway=192.168.29.1 --label app=wordpress quadlet-basic-name ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/run/systemd/generator/quadlet-basic-network.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "quadlet-basic-network.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", 
"LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "MemoryAccounting": "yes", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "quadlet-basic-network.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "system.slice -.mount sysinit.target", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-basic-network", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22405", "TimeoutStartUSec": "infinity", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Wednesday 02 April 2025 12:21:39 -0400 (0:00:00.583) 0:03:53.478 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] 
*** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Wednesday 02 April 2025 12:21:39 -0400 (0:00:00.046) 0:03:53.524 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Network": {} }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Wednesday 02 April 2025 12:21:39 -0400 (0:00:00.053) 0:03:53.577 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Wednesday 02 April 2025 12:21:39 -0400 (0:00:00.051) 0:03:53.629 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Wednesday 02 April 2025 12:21:39 -0400 (0:00:00.043) 0:03:53.672 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-basic-unused-network", "__podman_quadlet_type": "network", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Wednesday 02 April 2025 12:21:39 -0400 (0:00:00.059) 0:03:53.731 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:21:39 -0400 (0:00:00.076) 0:03:53.807 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:21:39 -0400 (0:00:00.129) 0:03:53.937 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:21:39 -0400 (0:00:00.052) 0:03:53.989 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:21:39 -0400 (0:00:00.063) 0:03:54.052 ******* ok: 
[managed-node1] => { "changed": false, "stat": { "atime": 1743610609.2783134, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610533.3987217, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986653, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "4263604762", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:21:40 -0400 (0:00:00.387) 0:03:54.439 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:21:40 -0400 (0:00:00.045) 0:03:54.485 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:21:40 -0400 (0:00:00.044) 0:03:54.529 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:21:40 -0400 (0:00:00.045) 0:03:54.574 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:21:40 -0400 (0:00:00.045) 0:03:54.620 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:21:40 -0400 (0:00:00.043) 0:03:54.663 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:21:40 -0400 (0:00:00.045) 0:03:54.709 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in 
subgid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:21:40 -0400 (0:00:00.045) 0:03:54.754 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Wednesday 02 April 2025 12:21:40 -0400 (0:00:00.044) 0:03:54.798 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-basic-unused-network-network.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Wednesday 02 April 2025 12:21:40 -0400 (0:00:00.077) 0:03:54.876 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Wednesday 02 April 2025 12:21:40 -0400 (0:00:00.046) 0:03:54.922 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Wednesday 02 April 2025 12:21:40 -0400 (0:00:00.042) 0:03:54.965 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-basic-unused-network.network", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Wednesday 02 April 2025 12:21:40 -0400 (0:00:00.176) 0:03:55.142 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Wednesday 02 April 2025 12:21:40 -0400 (0:00:00.050) 0:03:55.193 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Wednesday 02 April 2025 12:21:40 -0400 (0:00:00.041) 0:03:55.234 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Wednesday 02 April 2025 12:21:40 -0400 (0:00:00.087) 0:03:55.321 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Wednesday 02 April 2025 12:21:41 -0400 (0:00:00.071) 0:03:55.392 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Wednesday 02 April 2025 12:21:41 -0400 (0:00:00.042) 0:03:55.434 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Wednesday 02 April 2025 12:21:41 -0400 (0:00:00.043) 0:03:55.478 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Wednesday 02 April 2025 12:21:41 -0400 (0:00:00.042) 0:03:55.521 ******* TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Wednesday 02 April 2025 12:21:41 -0400 (0:00:00.041) 0:03:55.562 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Wednesday 02 April 2025 12:21:41 -0400 (0:00:00.042) 0:03:55.604 ******* ok: [managed-node1] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 35, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Wednesday 02 April 2025 12:21:41 -0400 (0:00:00.394) 0:03:55.999 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Wednesday 02 April 2025 12:21:41 -0400 (0:00:00.041) 0:03:56.041 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Wednesday 02 April 2025 12:21:41 -0400 (0:00:00.120) 0:03:56.161 ******* changed: [managed-node1] => { "changed": true, "checksum": "52c9d75ecaf81203cc1f1a3b1dd00fcd25067b01", "dest": "/etc/containers/systemd/quadlet-basic-unused-network.network", "gid": 0, "group": "root", "md5sum": "968d495367b59475979615e4884cbda2", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 54, "src": "/root/.ansible/tmp/ansible-tmp-1743610901.8747222-21524-125519104427091/source", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Wednesday 02 April 2025 12:21:42 -0400 (0:00:00.719) 0:03:56.881 ******* ok: [managed-node1] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Wednesday 02 April 2025 12:21:43 -0400 (0:00:00.619) 0:03:57.500 ******* changed: [managed-node1] => { "changed": true, "name": "quadlet-basic-unused-network-network.service", "state": "started", "status": { "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "system.slice systemd-journald.socket sysinit.target basic.target -.mount", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "quadlet-basic-unused-network-network.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore 
systemd-quadlet-basic-unused-network ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/run/systemd/generator/quadlet-basic-unused-network-network.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "quadlet-basic-unused-network-network.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "MemoryAccounting": "yes", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "quadlet-basic-unused-network-network.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "system.slice sysinit.target -.mount", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", 
"StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-basic-unused-network-network", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22405", "TimeoutStartUSec": "infinity", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Wednesday 02 April 2025 12:21:43 -0400 (0:00:00.581) 0:03:58.081 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Wednesday 02 April 2025 12:21:43 -0400 (0:00:00.054) 0:03:58.136 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Volume": { "VolumeName": "quadlet-basic-mysql-name" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Wednesday 02 April 2025 12:21:43 -0400 (0:00:00.052) 0:03:58.188 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Wednesday 02 April 2025 12:21:43 -0400 (0:00:00.050) 0:03:58.239 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Wednesday 02 April 2025 12:21:43 -0400 (0:00:00.040) 0:03:58.279 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-basic-mysql", "__podman_quadlet_type": "volume", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Wednesday 02 April 2025 12:21:43 -0400 (0:00:00.058) 0:03:58.337 ******* included: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:21:44 -0400 (0:00:00.151) 0:03:58.489 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:21:44 -0400 (0:00:00.073) 0:03:58.562 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:21:44 -0400 (0:00:00.067) 0:03:58.630 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:21:44 -0400 (0:00:00.065) 0:03:58.695 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610609.2783134, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610533.3987217, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986653, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "4263604762", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:21:44 -0400 (0:00:00.420) 0:03:59.116 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:21:44 -0400 (0:00:00.067) 0:03:59.183 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:21:44 -0400 (0:00:00.057) 0:03:59.241 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get 
subuid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:21:44 -0400 (0:00:00.055) 0:03:59.296 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:21:45 -0400 (0:00:00.044) 0:03:59.340 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:21:45 -0400 (0:00:00.042) 0:03:59.383 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:21:45 -0400 (0:00:00.044) 0:03:59.428 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:21:45 -0400 (0:00:00.051) 0:03:59.479 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Wednesday 02 April 2025 12:21:45 -0400 (0:00:00.048) 0:03:59.528 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-basic-mysql-volume.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Wednesday 02 April 2025 12:21:45 -0400 (0:00:00.089) 0:03:59.617 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Wednesday 02 April 2025 12:21:45 -0400 (0:00:00.050) 0:03:59.668 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Wednesday 02 April 2025 12:21:45 -0400 (0:00:00.114) 0:03:59.782 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_images": [], 
"__podman_quadlet_file": "/etc/containers/systemd/quadlet-basic-mysql.volume", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Wednesday 02 April 2025 12:21:45 -0400 (0:00:00.122) 0:03:59.904 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Wednesday 02 April 2025 12:21:45 -0400 (0:00:00.076) 0:03:59.980 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Wednesday 02 April 2025 12:21:45 -0400 (0:00:00.052) 0:04:00.033 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Wednesday 02 April 2025 12:21:45 -0400 (0:00:00.112) 0:04:00.146 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Wednesday 02 April 2025 12:21:45 -0400 (0:00:00.085) 0:04:00.231 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Wednesday 02 April 2025 12:21:45 -0400 (0:00:00.042) 0:04:00.273 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Wednesday 02 April 2025 12:21:45 -0400 (0:00:00.042) 0:04:00.316 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Wednesday 02 April 2025 12:21:46 -0400 (0:00:00.041) 0:04:00.357 ******* TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Wednesday 02 April 2025 12:21:46 -0400 (0:00:00.041) 0:04:00.398 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 
'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Wednesday 02 April 2025 12:21:46 -0400 (0:00:00.041) 0:04:00.439 ******* ok: [managed-node1] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 79, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Wednesday 02 April 2025 12:21:46 -0400 (0:00:00.399) 0:04:00.839 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Wednesday 02 April 2025 12:21:46 -0400 (0:00:00.115) 0:04:00.955 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Wednesday 02 April 2025 12:21:46 -0400 (0:00:00.041) 0:04:00.997 ******* changed: [managed-node1] => { "changed": true, "checksum": "90a3571bfc7670328fe3f8fb625585613dbd9c4a", "dest": "/etc/containers/systemd/quadlet-basic-mysql.volume", "gid": 0, "group": "root", "md5sum": "8682d71bf3c086f228cd72389b7c9018", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 89, "src": "/root/.ansible/tmp/ansible-tmp-1743610906.7097242-21707-17809242046552/source", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Wednesday 02 April 2025 12:21:47 -0400 (0:00:00.736) 0:04:01.733 ******* ok: [managed-node1] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Wednesday 02 April 2025 12:21:48 -0400 (0:00:00.663) 0:04:02.397 ******* changed: [managed-node1] => { "changed": true, "name": "quadlet-basic-mysql-volume.service", "state": "started", "status": { "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "sysinit.target systemd-journald.socket basic.target -.mount system.slice", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", 
"CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "quadlet-basic-mysql-volume.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore quadlet-basic-mysql-name ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/run/systemd/generator/quadlet-basic-mysql-volume.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "quadlet-basic-mysql-volume.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "MemoryAccounting": "yes", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", 
"MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "quadlet-basic-mysql-volume.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "sysinit.target -.mount system.slice", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-basic-mysql-volume", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22405", "TimeoutStartUSec": "infinity", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Wednesday 02 April 2025 12:21:48 -0400 (0:00:00.632) 0:04:03.030 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Wednesday 02 April 2025 12:21:48 -0400 (0:00:00.066) 0:04:03.096 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Volume": {} }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Wednesday 02 April 2025 12:21:48 -0400 (0:00:00.081) 0:04:03.178 ******* ok: [managed-node1] => { "ansible_facts": { 
"__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Wednesday 02 April 2025 12:21:48 -0400 (0:00:00.096) 0:04:03.274 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Wednesday 02 April 2025 12:21:49 -0400 (0:00:00.065) 0:04:03.340 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-basic-unused-volume", "__podman_quadlet_type": "volume", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Wednesday 02 April 2025 12:21:49 -0400 (0:00:00.089) 0:04:03.430 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:21:49 -0400 (0:00:00.199) 0:04:03.629 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:21:49 -0400 (0:00:00.081) 0:04:03.711 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:21:49 -0400 (0:00:00.082) 0:04:03.793 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:21:49 -0400 (0:00:00.088) 0:04:03.882 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610609.2783134, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610533.3987217, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986653, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 
0, "version": "4263604762", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:21:49 -0400 (0:00:00.425) 0:04:04.308 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:21:50 -0400 (0:00:00.048) 0:04:04.356 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:21:50 -0400 (0:00:00.049) 0:04:04.406 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:21:50 -0400 (0:00:00.059) 0:04:04.465 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:21:50 -0400 (0:00:00.057) 0:04:04.523 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:21:50 -0400 (0:00:00.051) 0:04:04.574 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:21:50 -0400 (0:00:00.054) 0:04:04.629 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:21:50 -0400 (0:00:00.050) 0:04:04.680 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Wednesday 02 April 2025 12:21:50 -0400 (0:00:00.043) 0:04:04.723 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", 
"__podman_service_name": "quadlet-basic-unused-volume-volume.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Wednesday 02 April 2025 12:21:50 -0400 (0:00:00.071) 0:04:04.794 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Wednesday 02 April 2025 12:21:50 -0400 (0:00:00.051) 0:04:04.846 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Wednesday 02 April 2025 12:21:50 -0400 (0:00:00.132) 0:04:04.979 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-basic-unused-volume.volume", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Wednesday 02 April 2025 12:21:50 -0400 (0:00:00.102) 0:04:05.082 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Wednesday 02 April 2025 12:21:50 -0400 (0:00:00.049) 0:04:05.132 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Wednesday 02 April 2025 12:21:50 -0400 (0:00:00.040) 0:04:05.172 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Wednesday 02 April 2025 12:21:50 -0400 (0:00:00.093) 0:04:05.266 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Wednesday 02 April 2025 12:21:51 -0400 (0:00:00.120) 0:04:05.386 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel 
linger] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Wednesday 02 April 2025 12:21:51 -0400 (0:00:00.069) 0:04:05.456 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Wednesday 02 April 2025 12:21:51 -0400 (0:00:00.068) 0:04:05.525 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Wednesday 02 April 2025 12:21:51 -0400 (0:00:00.053) 0:04:05.578 ******* TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Wednesday 02 April 2025 12:21:51 -0400 (0:00:00.048) 0:04:05.626 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Wednesday 02 April 2025 12:21:51 -0400 (0:00:00.052) 0:04:05.679 ******* ok: [managed-node1] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 113, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Wednesday 02 April 2025 12:21:51 -0400 (0:00:00.397) 0:04:06.077 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Wednesday 02 April 2025 12:21:51 -0400 (0:00:00.128) 0:04:06.205 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Wednesday 02 April 2025 12:21:51 -0400 (0:00:00.064) 0:04:06.269 ******* changed: [managed-node1] => { "changed": true, "checksum": "fd0ae560360afa5541b866560b1e849d25e216ef", "dest": "/etc/containers/systemd/quadlet-basic-unused-volume.volume", "gid": 0, "group": "root", "md5sum": "4967598a0284ad3e296ab106829a30a2", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 53, "src": "/root/.ansible/tmp/ansible-tmp-1743610911.9956682-21960-107210692685763/source", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Wednesday 02 April 2025 12:21:52 -0400 (0:00:00.825) 0:04:07.094 ******* ok: [managed-node1] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Wednesday 02 April 2025 12:21:53 -0400 (0:00:00.641) 0:04:07.735 ******* changed: [managed-node1] => { "changed": true, "name": "quadlet-basic-unused-volume-volume.service", "state": "started", "status": { "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "systemd-journald.socket -.mount basic.target system.slice sysinit.target", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "quadlet-basic-unused-volume-volume.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-basic-unused-volume ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/run/systemd/generator/quadlet-basic-unused-volume-volume.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "quadlet-basic-unused-volume-volume.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", 
"InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "MemoryAccounting": "yes", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "quadlet-basic-unused-volume-volume.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "-.mount system.slice sysinit.target", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-basic-unused-volume-volume", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22405", "TimeoutStartUSec": "infinity", 
"TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Wednesday 02 April 2025 12:21:54 -0400 (0:00:00.650) 0:04:08.386 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Wednesday 02 April 2025 12:21:54 -0400 (0:00:00.069) 0:04:08.455 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Container": { "ContainerName": "quadlet-basic-mysql-name", "Environment": [ "FOO=/bin/busybox-extras", "BAZ=test" ], "Image": "quay.io/linux-system-roles/mysql:5.6", "Network": "quadlet-basic.network", "PodmanArgs": "--secret=mysql_container_root_password,type=env,target=MYSQL_ROOT_PASSWORD --secret=json_secret,type=mount,target=/tmp/test.json", "Volume": "quadlet-basic-mysql.volume:/var/lib/mysql" }, "Install": { "WantedBy": "default.target" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Wednesday 02 April 2025 12:21:54 -0400 (0:00:00.107) 0:04:08.563 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Wednesday 02 April 2025 12:21:54 -0400 (0:00:00.079) 0:04:08.642 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Wednesday 02 April 2025 12:21:54 -0400 (0:00:00.048) 0:04:08.691 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-basic-mysql", "__podman_quadlet_type": "container", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Wednesday 02 April 2025 12:21:54 -0400 (0:00:00.069) 0:04:08.760 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 
12:21:54 -0400 (0:00:00.158) 0:04:08.919 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:21:54 -0400 (0:00:00.051) 0:04:08.970 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:21:54 -0400 (0:00:00.050) 0:04:09.020 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:21:54 -0400 (0:00:00.055) 0:04:09.076 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610609.2783134, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610533.3987217, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986653, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "4263604762", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:21:55 -0400 (0:00:00.410) 0:04:09.486 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:21:55 -0400 (0:00:00.052) 0:04:09.538 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:21:55 -0400 (0:00:00.043) 0:04:09.582 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:21:55 -0400 (0:00:00.051) 0:04:09.633 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:21:55 -0400 (0:00:00.051) 0:04:09.685 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:21:55 -0400 (0:00:00.053) 0:04:09.738 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:21:55 -0400 (0:00:00.054) 0:04:09.792 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:21:55 -0400 (0:00:00.048) 0:04:09.841 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Wednesday 02 April 2025 12:21:55 -0400 (0:00:00.043) 0:04:09.885 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [ "quay.io/linux-system-roles/mysql:5.6" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-basic-mysql.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Wednesday 02 April 2025 12:21:55 -0400 (0:00:00.072) 0:04:09.957 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Wednesday 02 April 2025 12:21:55 -0400 (0:00:00.051) 0:04:10.008 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Wednesday 02 April 2025 12:21:55 -0400 (0:00:00.150) 0:04:10.159 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_images": [ "quay.io/linux-system-roles/mysql:5.6" ], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-basic-mysql.container", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Wednesday 02 April 2025 12:21:55 -0400 (0:00:00.115) 0:04:10.274 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Wednesday 02 April 2025 12:21:55 -0400 (0:00:00.062) 0:04:10.337 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Wednesday 02 April 2025 12:21:56 -0400 (0:00:00.047) 0:04:10.384 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Wednesday 02 April 2025 12:21:56 -0400 (0:00:00.086) 0:04:10.471 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Wednesday 02 April 2025 12:21:56 -0400 (0:00:00.079) 0:04:10.550 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Wednesday 02 April 2025 12:21:56 -0400 (0:00:00.042) 0:04:10.592 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Wednesday 02 April 2025 12:21:56 -0400 (0:00:00.042) 0:04:10.635 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Wednesday 02 April 2025 12:21:56 -0400 (0:00:00.040) 0:04:10.675 ******* TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Wednesday 02 April 2025 12:21:56 -0400 (0:00:00.040) 0:04:10.716 ******* changed: [managed-node1] => (item=None) => { "attempts": 1, "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } changed: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for 
this result", "changed": true } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Wednesday 02 April 2025 12:22:05 -0400 (0:00:08.656) 0:04:19.373 ******* ok: [managed-node1] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 155, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Wednesday 02 April 2025 12:22:05 -0400 (0:00:00.392) 0:04:19.765 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Wednesday 02 April 2025 12:22:05 -0400 (0:00:00.043) 0:04:19.808 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Wednesday 02 April 2025 12:22:05 -0400 (0:00:00.044) 0:04:19.852 ******* changed: [managed-node1] => { "changed": true, "checksum": "0b6cac7929623f1059e78ef39b8b0a25169b28a6", "dest": "/etc/containers/systemd/quadlet-basic-mysql.container", "gid": 0, "group": "root", "md5sum": "1ede2d50fe62a3ca756acb50f2f6868e", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 448, "src": "/root/.ansible/tmp/ansible-tmp-1743610925.566711-22356-256581358651919/source", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Wednesday 02 April 2025 12:22:06 -0400 (0:00:00.716) 0:04:20.569 ******* ok: [managed-node1] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Wednesday 02 April 2025 12:22:06 -0400 (0:00:00.624) 0:04:21.193 ******* changed: [managed-node1] => { "changed": true, "name": "quadlet-basic-mysql.service", "state": "started", "status": { "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "system.slice quadlet-basic-network.service sysinit.target basic.target systemd-journald.socket -.mount quadlet-basic-mysql-volume.service", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", 
"CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "yes", "DelegateControllers": "cpu cpuacct cpuset io blkio memory devices pids", "Description": "quadlet-basic-mysql.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-basic-mysql.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name=quadlet-basic-mysql-name --cidfile=/run/quadlet-basic-mysql.cid --replace --rm --cgroups=split --network=quadlet-basic-name --sdnotify=conmon -d -v quadlet-basic-mysql-name:/var/lib/mysql --env BAZ=test --env FOO=/bin/busybox-extras --secret=mysql_container_root_password,type=env,target=MYSQL_ROOT_PASSWORD --secret=json_secret,type=mount,target=/tmp/test.json quay.io/linux-system-roles/mysql:5.6 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-basic-mysql.cid ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-basic-mysql.cid ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/run/systemd/generator/quadlet-basic-mysql.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "quadlet-basic-mysql.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": 
"infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "MemoryAccounting": "yes", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "quadlet-basic-mysql.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "quadlet-basic-network.service system.slice sysinit.target -.mount quadlet-basic-mysql-volume.service", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-basic-mysql.container", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-basic-mysql", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22405", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Wednesday 02 April 2025 12:22:07 -0400 (0:00:00.958) 0:04:22.152 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:198 Wednesday 02 April 2025 12:22:07 -0400 (0:00:00.043) 0:04:22.195 ******* TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:204 Wednesday 02 April 2025 12:22:07 -0400 (0:00:00.039) 0:04:22.235 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:213 Wednesday 02 April 2025 12:22:07 -0400 (0:00:00.040) 0:04:22.275 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Check files] ************************************************************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:253 Wednesday 02 April 2025 12:22:07 -0400 (0:00:00.038) 0:04:22.314 ******* ok: [managed-node1] => (item=quadlet-basic-mysql.container) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "cat", "/etc/containers/systemd/quadlet-basic-mysql.container" ], "delta": "0:00:00.003360", "end": "2025-04-02 12:22:08.298974", "item": "quadlet-basic-mysql.container", "rc": 0, "start": "2025-04-02 12:22:08.295614" } STDOUT:
#
# Ansible managed
#
# system_role:podman
[Install]
WantedBy=default.target
[Container]
Image=quay.io/linux-system-roles/mysql:5.6
ContainerName=quadlet-basic-mysql-name
Volume=quadlet-basic-mysql.volume:/var/lib/mysql
Network=quadlet-basic.network
PodmanArgs=--secret=mysql_container_root_password,type=env,target=MYSQL_ROOT_PASSWORD --secret=json_secret,type=mount,target=/tmp/test.json
Environment=FOO=/bin/busybox-extras
Environment=BAZ=test
ok: [managed-node1] => (item=quadlet-basic.network) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "cat", "/etc/containers/systemd/quadlet-basic.network" ], "delta": "0:00:00.002477", "end": "2025-04-02 12:22:08.646236", "item": "quadlet-basic.network", "rc": 0, "start": "2025-04-02 12:22:08.643759" } STDOUT:
[Network]
Subnet=192.168.29.0/24
Gateway=192.168.29.1
Label=app=wordpress
NetworkName=quadlet-basic-name
ok: [managed-node1] => (item=quadlet-basic-mysql.volume) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "cat", "/etc/containers/systemd/quadlet-basic-mysql.volume" ], "delta": "0:00:00.002509", "end": "2025-04-02 12:22:09.016525", "item": "quadlet-basic-mysql.volume", "rc": 0, "start": "2025-04-02 12:22:09.014016" } STDOUT:
#
# Ansible managed
#
# system_role:podman
[Volume]
VolumeName=quadlet-basic-mysql-name
TASK [Check JSON] ************************************************************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:263 Wednesday 02 April 2025
12:22:09 -0400 (0:00:01.115) 0:04:23.429 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "exec", "quadlet-basic-mysql-name", "cat", "/tmp/test.json" ], "delta": "0:00:00.199494", "end": "2025-04-02 12:22:09.593491", "failed_when_result": false, "rc": 0, "start": "2025-04-02 12:22:09.393997" } STDOUT: {"test": "json"} TASK [Cleanup user] ************************************************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:299 Wednesday 02 April 2025 12:22:09 -0400 (0:00:00.580) 0:04:24.010 ******* TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Wednesday 02 April 2025 12:22:09 -0400 (0:00:00.154) 0:04:24.164 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Wednesday 02 April 2025 12:22:09 -0400 (0:00:00.069) 0:04:24.234 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Wednesday 02 April 2025 12:22:09 -0400 (0:00:00.052) 0:04:24.286 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Wednesday 02 April 2025 12:22:09 -0400 (0:00:00.042) 0:04:24.329 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Wednesday 02 April 2025 12:22:10 -0400 (0:00:00.040) 0:04:24.370 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Wednesday 02 April 2025 12:22:10 -0400 (0:00:00.042) 0:04:24.413 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Wednesday 02 April 2025 12:22:10 -0400 (0:00:00.042) 0:04:24.455 ******* ok: [managed-node1] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node1] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": 
"CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node1] => (item=CentOS_8.yml) => { "ansible_facts": { "__podman_packages": [ "crun", "podman", "podman-plugins", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } ok: [managed-node1] => (item=CentOS_8.yml) => { "ansible_facts": { "__podman_packages": [ "crun", "podman", "podman-plugins", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Wednesday 02 April 2025 12:22:10 -0400 (0:00:00.105) 0:04:24.560 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Wednesday 02 April 2025 12:22:11 -0400 (0:00:01.642) 0:04:26.203 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Wednesday 02 April 2025 12:22:11 -0400 (0:00:00.039) 0:04:26.243 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Wednesday 02 April 2025 12:22:11 -0400 (0:00:00.054) 0:04:26.297 ******* skipping: [managed-node1] => {} TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Wednesday 02 April 2025 12:22:12 -0400 (0:00:00.042) 0:04:26.340 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Wednesday 02 April 2025 12:22:12 -0400 (0:00:00.040) 0:04:26.380 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Wednesday 02 April 2025 12:22:12 -0400 (0:00:00.041) 0:04:26.422 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.027139", "end": "2025-04-02 12:22:12.410396", "rc": 0, "start": "2025-04-02 12:22:12.383257" } STDOUT: podman version 4.9.4-dev TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Wednesday 02 April 2025 12:22:12 -0400 (0:00:00.410) 0:04:26.832 ******* ok: [managed-node1] => { "ansible_facts": { "podman_version": "4.9.4-dev" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Wednesday 02 April 2025 12:22:12 -0400 (0:00:00.042) 0:04:26.874 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Wednesday 02 April 2025 12:22:12 -0400 (0:00:00.042) 0:04:26.917 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } META: end_host conditional evaluated to false, continuing execution for managed-node1 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Wednesday 02 April 2025 12:22:12 -0400 (0:00:00.134) 0:04:27.051 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } META: end_host conditional evaluated to false, continuing execution for managed-node1 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Wednesday 02 April 2025 12:22:12 -0400 (0:00:00.199) 0:04:27.250 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:22:12 -0400 (0:00:00.079) 0:04:27.330 ******* ok: [managed-node1] => { "ansible_facts": { "getent_passwd": { "user_quadlet_basic": [ "x", "1111", "1111", "", "/home/user_quadlet_basic", "/bin/bash" ] } }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:22:13 -0400 (0:00:00.411) 0:04:27.742 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:22:13 -0400 (0:00:00.051) 0:04:27.793 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_group": "1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:22:13 -0400 (0:00:00.063) 0:04:27.857 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610609.2783134, "attr_flags": "", 
"attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610533.3987217, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986653, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "4263604762", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:22:13 -0400 (0:00:00.458) 0:04:28.315 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_basic" ], "delta": "0:00:00.004496", "end": "2025-04-02 12:22:14.410016", "rc": 0, "start": "2025-04-02 12:22:14.405520" } STDOUT: 0: user_quadlet_basic 100000 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:22:14 -0400 (0:00:00.522) 0:04:28.838 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_basic" ], "delta": "0:00:00.004919", "end": "2025-04-02 12:22:14.929589", "rc": 0, "start": "2025-04-02 12:22:14.924670" } STDOUT: 0: user_quadlet_basic 100000 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:22:15 -0400 (0:00:00.519) 0:04:29.357 ******* ok: [managed-node1] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_basic": { "range": 65536, "start": 100000 } }, "podman_subuid_info": { "user_quadlet_basic": { "range": 65536, "start": 100000 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:22:15 -0400 (0:00:00.122) 0:04:29.480 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:22:15 -0400 (0:00:00.042) 0:04:29.523 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:22:15 -0400 (0:00:00.044) 0:04:29.568 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:22:15 -0400 (0:00:00.044) 0:04:29.612 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:22:15 -0400 (0:00:00.043) 0:04:29.655 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Wednesday 02 April 2025 12:22:15 -0400 (0:00:00.044) 0:04:29.699 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_container_conf_file": "/root/.config/containers/containers.conf.d/50-systemroles.conf", "__podman_parent_mode": "0700", "__podman_parent_path": "/root/.config/containers", "__podman_policy_json_file": "/root/.config/containers/policy.json", "__podman_registries_conf_file": "/root/.config/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/root/.config/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:126 Wednesday 02 April 2025 12:22:15 -0400 (0:00:00.056) 0:04:29.756 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Wednesday 02 April 2025 12:22:15 -0400 (0:00:00.076) 0:04:29.832 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Wednesday 02 April 2025 12:22:15 -0400 (0:00:00.049) 0:04:29.882 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:129 Wednesday 02 April 2025 12:22:15 -0400 (0:00:00.042) 0:04:29.924 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Wednesday 02 April 2025 12:22:15 -0400 (0:00:00.082) 0:04:30.007 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Wednesday 02 April 2025 12:22:15 -0400 (0:00:00.042) 0:04:30.050 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:132 Wednesday 02 April 2025 12:22:15 -0400 (0:00:00.039) 0:04:30.090 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:7 Wednesday 02 April 2025 12:22:15 -0400 (0:00:00.144) 0:04:30.234 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:15 Wednesday 02 April 2025 12:22:15 -0400 (0:00:00.040) 0:04:30.274 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:135 Wednesday 02 April 2025 12:22:15 -0400 (0:00:00.040) 0:04:30.315 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:8 Wednesday 02 April 2025 12:22:16 -0400 (0:00:00.086) 0:04:30.402 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:16 Wednesday 02 April 2025 12:22:16 -0400 (0:00:00.039) 0:04:30.442 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:21 Wednesday 02 April 2025 12:22:16 -0400 (0:00:00.041) 0:04:30.483 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:27 Wednesday 02 April 2025 12:22:16 -0400 (0:00:00.041) 0:04:30.525 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:141 Wednesday 02 April 2025 
12:22:16 -0400 (0:00:00.039) 0:04:30.565 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:148 Wednesday 02 April 2025 12:22:16 -0400 (0:00:00.040) 0:04:30.606 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:155 Wednesday 02 April 2025 12:22:16 -0400 (0:00:00.041) 0:04:30.647 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:159 Wednesday 02 April 2025 12:22:16 -0400 (0:00:00.039) 0:04:30.686 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:168 Wednesday 02 April 2025 12:22:16 -0400 (0:00:00.039) 0:04:30.725 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:177 Wednesday 02 April 2025 12:22:16 -0400 (0:00:00.038) 0:04:30.764 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node1 included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Wednesday 02 April 2025 12:22:16 -0400 (0:00:00.130) 0:04:30.895 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_user": "user_quadlet_basic" }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Wednesday 02 April 2025 12:22:16 -0400 (0:00:00.113) 0:04:31.009 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:22:16 -0400 (0:00:00.074) 0:04:31.083 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:22:16 -0400 (0:00:00.051) 0:04:31.135 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:22:16 -0400 (0:00:00.050) 0:04:31.185 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_group": "1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:22:16 -0400 (0:00:00.056) 0:04:31.242 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:22:16 -0400 (0:00:00.039) 0:04:31.281 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:22:16 -0400 (0:00:00.041) 0:04:31.323 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:22:17 -0400 (0:00:00.042) 0:04:31.365 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:22:17 -0400 (0:00:00.039) 0:04:31.405 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:22:17 -0400 (0:00:00.041) 0:04:31.446 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:22:17 -0400 (0:00:00.042) 0:04:31.489 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:22:17 -0400 (0:00:00.039) 0:04:31.528 ******* skipping: 
[managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:22:17 -0400 (0:00:00.041) 0:04:31.570 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14 Wednesday 02 April 2025 12:22:17 -0400 (0:00:00.041) 0:04:31.611 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_rootless": true, "__podman_xdg_runtime_dir": "/run/user/1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20 Wednesday 02 April 2025 12:22:17 -0400 (0:00:00.051) 0:04:31.663 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Wednesday 02 April 2025 12:22:17 -0400 (0:00:00.140) 0:04:31.803 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Wednesday 02 April 2025 12:22:17 -0400 (0:00:00.042) 0:04:31.846 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Wednesday 02 April 2025 12:22:17 -0400 (0:00:00.043) 0:04:31.890 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_cancel_user_linger": [ "user_quadlet_basic" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25 Wednesday 02 April 2025 12:22:17 -0400 (0:00:00.059) 0:04:31.949 ******* ok: [managed-node1] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41 Wednesday 02 April 2025 12:22:18 -0400 (0:00:00.388) 0:04:32.338 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Wednesday 02 April 2025 12:22:18 -0400 (0:00:00.044) 0:04:32.383 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_user": "user_quadlet_basic" }, 
"changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Wednesday 02 April 2025 12:22:18 -0400 (0:00:00.043) 0:04:32.427 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:22:18 -0400 (0:00:00.073) 0:04:32.500 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:22:18 -0400 (0:00:00.050) 0:04:32.550 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:22:18 -0400 (0:00:00.049) 0:04:32.600 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_group": "1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:22:18 -0400 (0:00:00.053) 0:04:32.654 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:22:18 -0400 (0:00:00.041) 0:04:32.695 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:22:18 -0400 (0:00:00.112) 0:04:32.808 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:22:18 -0400 (0:00:00.040) 0:04:32.848 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:22:18 -0400 (0:00:00.042) 0:04:32.891 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:22:18 -0400 (0:00:00.044) 0:04:32.935 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:22:18 -0400 (0:00:00.045) 0:04:32.980 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:22:18 -0400 (0:00:00.041) 0:04:33.022 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:22:18 -0400 (0:00:00.041) 0:04:33.064 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14 Wednesday 02 April 2025 12:22:18 -0400 (0:00:00.039) 0:04:33.104 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_rootless": true, "__podman_xdg_runtime_dir": "/run/user/1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20 Wednesday 02 April 2025 12:22:18 -0400 (0:00:00.054) 0:04:33.158 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Wednesday 02 April 2025 12:22:18 -0400 (0:00:00.077) 0:04:33.235 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Wednesday 02 April 2025 12:22:18 -0400 (0:00:00.043) 0:04:33.279 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Wednesday 02 April 2025 12:22:18 -0400 (0:00:00.045) 0:04:33.325 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_cancel_user_linger": [ "user_quadlet_basic" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25 Wednesday 02 April 2025 12:22:19 -0400 (0:00:00.049) 0:04:33.374 ******* ok: [managed-node1] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41 Wednesday 02 April 2025 12:22:19 -0400 (0:00:00.372) 0:04:33.747 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:184 Wednesday 02 April 2025 12:22:19 -0400 (0:00:00.112) 0:04:33.860 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:191 Wednesday 02 April 2025 12:22:19 -0400 (0:00:00.039) 0:04:33.899 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Wednesday 02 April 2025 12:22:19 -0400 (0:00:00.169) 0:04:34.069 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Container": { "ContainerName": "quadlet-basic-mysql-name", "Environment": [ "FOO=/bin/busybox-extras", "BAZ=test" ], "Image": "quay.io/linux-system-roles/mysql:5.6", "Network": "quadlet-basic.network", "PodmanArgs": "--secret=mysql_container_root_password,type=env,target=MYSQL_ROOT_PASSWORD --secret=json_secret,type=mount,target=/tmp/test.json", "Volume": "quadlet-basic-mysql.volume:/var/lib/mysql" }, "Install": { "WantedBy": "default.target" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Wednesday 02 April 2025 12:22:19 -0400 (0:00:00.049) 0:04:34.118 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_basic" }, "changed": false }
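The Container and Install dictionaries in the "Set per-container variables part 0" fact above are the quadlet spec exactly as the caller handed it to the role. A rough sketch of a playbook invocation that produces such a spec, using the role's documented podman_quadlet_specs and podman_run_as_user variables with the values taken from the logged facts; the per-spec state field and the previously created mysql_container_root_password and json_secret secrets are assumptions, not shown at this point in the log:

- name: Manage the rootless quadlet container (illustrative sketch)
  hosts: managed-node1
  vars:
    podman_run_as_user: user_quadlet_basic
    podman_quadlet_specs:
      # name and type mirror the logged __podman_quadlet_name / __podman_quadlet_type
      - name: quadlet-basic-mysql
        type: container
        state: absent  # this pass runs the cleanup path, matching the logged __podman_state
        Container:
          ContainerName: quadlet-basic-mysql-name
          Environment:
            - FOO=/bin/busybox-extras
            - BAZ=test
          Image: quay.io/linux-system-roles/mysql:5.6
          Network: quadlet-basic.network
          # both secrets are assumed to exist already, e.g. created earlier via podman_secrets
          PodmanArgs: >-
            --secret=mysql_container_root_password,type=env,target=MYSQL_ROOT_PASSWORD
            --secret=json_secret,type=mount,target=/tmp/test.json
          Volume: quadlet-basic-mysql.volume:/var/lib/mysql
        Install:
          WantedBy: default.target
  roles:
    - fedora.linux_system_roles.podman

Per the __podman_quadlet_file fact logged a little further on, a container spec of this shape corresponds to the file /home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.container; with state absent the role removes that file instead of writing it.

TASK [fedora.linux_system_roles.podman : Fail if no 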
quadlet spec is given] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Wednesday 02 April 2025 12:22:19 -0400 (0:00:00.051) 0:04:34.170 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Wednesday 02 April 2025 12:22:19 -0400 (0:00:00.041) 0:04:34.211 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-basic-mysql", "__podman_quadlet_type": "container", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Wednesday 02 April 2025 12:22:19 -0400 (0:00:00.054) 0:04:34.266 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:22:20 -0400 (0:00:00.078) 0:04:34.344 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:22:20 -0400 (0:00:00.052) 0:04:34.397 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:22:20 -0400 (0:00:00.048) 0:04:34.445 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_group": "1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:22:20 -0400 (0:00:00.059) 0:04:34.505 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610609.2783134, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610533.3987217, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986653, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "4263604762", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:22:20 -0400 (0:00:00.482) 0:04:34.987 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_basic" ], "delta": "0:00:00.003179", "end": "2025-04-02 12:22:20.976294", "rc": 0, "start": "2025-04-02 12:22:20.973115" } STDOUT: 0: user_quadlet_basic 100000 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:22:21 -0400 (0:00:00.413) 0:04:35.400 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_basic" ], "delta": "0:00:00.003777", "end": "2025-04-02 12:22:21.385634", "rc": 0, "start": "2025-04-02 12:22:21.381857" } STDOUT: 0: user_quadlet_basic 100000 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:22:21 -0400 (0:00:00.409) 0:04:35.809 ******* ok: [managed-node1] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_basic": { "range": 65536, "start": 100000 } }, "podman_subuid_info": { "user_quadlet_basic": { "range": 65536, "start": 100000 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:22:21 -0400 (0:00:00.064) 0:04:35.874 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:22:21 -0400 (0:00:00.043) 0:04:35.918 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:22:21 -0400 (0:00:00.044) 0:04:35.962 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:22:21 -0400 (0:00:00.047) 0:04:36.009 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:22:21 -0400 (0:00:00.053) 0:04:36.063 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Wednesday 02 
April 2025 12:22:21 -0400 (0:00:00.068) 0:04:36.132 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [ "quay.io/linux-system-roles/mysql:5.6" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-basic-mysql.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_basic", "__podman_xdg_runtime_dir": "/run/user/1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Wednesday 02 April 2025 12:22:21 -0400 (0:00:00.110) 0:04:36.242 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_basic/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Wednesday 02 April 2025 12:22:21 -0400 (0:00:00.071) 0:04:36.314 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Wednesday 02 April 2025 12:22:22 -0400 (0:00:00.058) 0:04:36.372 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_images": [ "quay.io/linux-system-roles/mysql:5.6" ], "__podman_quadlet_file": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.container", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Wednesday 02 April 2025 12:22:22 -0400 (0:00:00.142) 0:04:36.515 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Wednesday 02 April 2025 12:22:22 -0400 (0:00:00.053) 0:04:36.568 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Wednesday 02 April 2025 12:22:22 -0400 (0:00:00.160) 0:04:36.729 ******* ok: [managed-node1] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Wednesday 02 April 2025 12:22:22 -0400 (0:00:00.383) 0:04:37.113 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Wednesday 02 April 2025 12:22:22 -0400 (0:00:00.059) 0:04:37.173 ******* ok: [managed-node1] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Wednesday 02 April 2025 12:22:23 -0400 (0:00:00.370) 0:04:37.544 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Wednesday 02 April 2025 12:22:23 -0400 (0:00:00.041) 0:04:37.585 ******* ok: [managed-node1] => { "changed": false, "path": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.container", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Wednesday 02 April 2025 12:22:23 -0400 (0:00:00.373) 0:04:37.959 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Wednesday 02 April 2025 12:22:23 -0400 (0:00:00.043) 0:04:38.003 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Wednesday 02 April 2025 12:22:23 -0400 (0:00:00.053) 0:04:38.056 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Wednesday 02 April 2025 12:22:23 -0400 (0:00:00.060) 0:04:38.116 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Wednesday 02 April 2025 12:22:23 -0400 (0:00:00.038) 0:04:38.155 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Wednesday 02 April 2025 12:22:23 -0400 (0:00:00.046) 0:04:38.202 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Enable linger if needed] 
************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Wednesday 02 April 2025 12:22:24 -0400 (0:00:00.142) 0:04:38.344 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Wednesday 02 April 2025 12:22:24 -0400 (0:00:00.043) 0:04:38.387 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Wednesday 02 April 2025 12:22:24 -0400 (0:00:00.045) 0:04:38.433 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_cancel_user_linger": [ "user_quadlet_basic" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Wednesday 02 April 2025 12:22:24 -0400 (0:00:00.047) 0:04:38.480 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Wednesday 02 April 2025 12:22:24 -0400 (0:00:00.045) 0:04:38.525 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Wednesday 02 April 2025 12:22:24 -0400 (0:00:00.044) 0:04:38.570 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Wednesday 02 April 2025 12:22:24 -0400 (0:00:00.043) 0:04:38.614 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Wednesday 02 April 2025 12:22:24 -0400 (0:00:00.045) 0:04:38.659 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Wednesday 02 April 2025 12:22:24 -0400 (0:00:00.045) 0:04:38.705 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** 
task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Wednesday 02 April 2025 12:22:24 -0400 (0:00:00.043) 0:04:38.748 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Wednesday 02 April 2025 12:22:24 -0400 (0:00:00.046) 0:04:38.795 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Wednesday 02 April 2025 12:22:24 -0400 (0:00:00.041) 0:04:38.836 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Volume": {} }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Wednesday 02 April 2025 12:22:24 -0400 (0:00:00.050) 0:04:38.886 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_basic" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Wednesday 02 April 2025 12:22:24 -0400 (0:00:00.051) 0:04:38.937 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Wednesday 02 April 2025 12:22:24 -0400 (0:00:00.042) 0:04:38.980 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-basic-unused-volume", "__podman_quadlet_type": "volume", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Wednesday 02 April 2025 12:22:24 -0400 (0:00:00.056) 0:04:39.036 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:22:24 -0400 (0:00:00.156) 0:04:39.193 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:22:24 -0400 (0:00:00.052) 
0:04:39.246 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:22:24 -0400 (0:00:00.049) 0:04:39.295 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_group": "1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:22:25 -0400 (0:00:00.055) 0:04:39.351 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610609.2783134, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610533.3987217, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986653, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "4263604762", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:22:25 -0400 (0:00:00.382) 0:04:39.733 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_basic" ], "delta": "0:00:00.003408", "end": "2025-04-02 12:22:25.707441", "rc": 0, "start": "2025-04-02 12:22:25.704033" } STDOUT: 0: user_quadlet_basic 100000 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:22:25 -0400 (0:00:00.421) 0:04:40.155 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_basic" ], "delta": "0:00:00.003324", "end": "2025-04-02 12:22:26.162696", "rc": 0, "start": "2025-04-02 12:22:26.159372" } STDOUT: 0: user_quadlet_basic 100000 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:22:26 -0400 (0:00:00.421) 0:04:40.577 ******* ok: [managed-node1] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_basic": { "range": 65536, "start": 100000 } }, "podman_subuid_info": { "user_quadlet_basic": { "range": 65536, "start": 100000 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:22:26 -0400 (0:00:00.064) 0:04:40.641 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional 
result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:22:26 -0400 (0:00:00.055) 0:04:40.697 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:22:26 -0400 (0:00:00.060) 0:04:40.758 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:22:26 -0400 (0:00:00.049) 0:04:40.808 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:22:26 -0400 (0:00:00.129) 0:04:40.937 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Wednesday 02 April 2025 12:22:26 -0400 (0:00:00.045) 0:04:40.982 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-basic-unused-volume-volume.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_basic", "__podman_xdg_runtime_dir": "/run/user/1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Wednesday 02 April 2025 12:22:26 -0400 (0:00:00.084) 0:04:41.066 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_basic/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Wednesday 02 April 2025 12:22:26 -0400 (0:00:00.071) 0:04:41.138 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Wednesday 02 April 2025 12:22:26 -0400 (0:00:00.047) 0:04:41.185 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-volume.volume", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Wednesday 02 April 2025 12:22:26 -0400 (0:00:00.122) 0:04:41.308 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Wednesday 02 April 2025 12:22:27 -0400 (0:00:00.057) 0:04:41.365 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Wednesday 02 April 2025 12:22:27 -0400 (0:00:00.110) 0:04:41.476 ******* ok: [managed-node1] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Wednesday 02 April 2025 12:22:27 -0400 (0:00:00.385) 0:04:41.861 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Wednesday 02 April 2025 12:22:27 -0400 (0:00:00.045) 0:04:41.907 ******* ok: [managed-node1] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Wednesday 02 April 2025 12:22:27 -0400 (0:00:00.375) 0:04:42.282 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Wednesday 02 April 2025 12:22:27 -0400 (0:00:00.041) 0:04:42.323 ******* ok: [managed-node1] => { "changed": false, "path": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-volume.volume", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Wednesday 02 April 2025 12:22:28 -0400 (0:00:00.436) 0:04:42.760 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Wednesday 02 April 2025 12:22:28 -0400 (0:00:00.057) 0:04:42.817 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task 
path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Wednesday 02 April 2025 12:22:28 -0400 (0:00:00.056) 0:04:42.874 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Wednesday 02 April 2025 12:22:28 -0400 (0:00:00.058) 0:04:42.933 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Wednesday 02 April 2025 12:22:28 -0400 (0:00:00.041) 0:04:42.974 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Wednesday 02 April 2025 12:22:28 -0400 (0:00:00.047) 0:04:43.021 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Wednesday 02 April 2025 12:22:28 -0400 (0:00:00.070) 0:04:43.092 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Wednesday 02 April 2025 12:22:28 -0400 (0:00:00.042) 0:04:43.135 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Wednesday 02 April 2025 12:22:28 -0400 (0:00:00.052) 0:04:43.188 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_cancel_user_linger": [ "user_quadlet_basic" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Wednesday 02 April 2025 12:22:28 -0400 (0:00:00.048) 0:04:43.236 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Wednesday 02 April 2025 12:22:28 -0400 (0:00:00.043) 0:04:43.280 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Wednesday 02 April 2025 12:22:28 -0400 (0:00:00.042) 0:04:43.322 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Wednesday 02 April 2025 12:22:29 -0400 (0:00:00.043) 0:04:43.365 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Wednesday 02 April 2025 12:22:29 -0400 (0:00:00.044) 0:04:43.410 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Wednesday 02 April 2025 12:22:29 -0400 (0:00:00.044) 0:04:43.454 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Wednesday 02 April 2025 12:22:29 -0400 (0:00:00.115) 0:04:43.570 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Wednesday 02 April 2025 12:22:29 -0400 (0:00:00.046) 0:04:43.616 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Wednesday 02 April 2025 12:22:29 -0400 (0:00:00.040) 0:04:43.656 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Volume": { "VolumeName": "quadlet-basic-mysql-name" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Wednesday 02 April 2025 12:22:29 -0400 (0:00:00.067) 0:04:43.723 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_basic" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Wednesday 
02 April 2025 12:22:29 -0400 (0:00:00.077) 0:04:43.801 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Wednesday 02 April 2025 12:22:29 -0400 (0:00:00.049) 0:04:43.851 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-basic-mysql", "__podman_quadlet_type": "volume", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Wednesday 02 April 2025 12:22:29 -0400 (0:00:00.073) 0:04:43.924 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:22:29 -0400 (0:00:00.083) 0:04:44.007 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:22:29 -0400 (0:00:00.050) 0:04:44.058 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:22:29 -0400 (0:00:00.053) 0:04:44.112 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_group": "1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:22:29 -0400 (0:00:00.081) 0:04:44.193 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610609.2783134, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610533.3987217, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986653, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "4263604762", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:22:30 -0400 (0:00:00.427) 
0:04:44.620 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_basic" ], "delta": "0:00:00.003271", "end": "2025-04-02 12:22:30.611637", "rc": 0, "start": "2025-04-02 12:22:30.608366" } STDOUT: 0: user_quadlet_basic 100000 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:22:30 -0400 (0:00:00.478) 0:04:45.099 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_basic" ], "delta": "0:00:00.003226", "end": "2025-04-02 12:22:31.105404", "rc": 0, "start": "2025-04-02 12:22:31.102178" } STDOUT: 0: user_quadlet_basic 100000 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:22:31 -0400 (0:00:00.423) 0:04:45.523 ******* ok: [managed-node1] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_basic": { "range": 65536, "start": 100000 } }, "podman_subuid_info": { "user_quadlet_basic": { "range": 65536, "start": 100000 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:22:31 -0400 (0:00:00.080) 0:04:45.604 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:22:31 -0400 (0:00:00.055) 0:04:45.659 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:22:31 -0400 (0:00:00.053) 0:04:45.713 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:22:31 -0400 (0:00:00.050) 0:04:45.763 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:22:31 -0400 (0:00:00.045) 0:04:45.808 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Wednesday 02 April 2025 12:22:31 -0400 (0:00:00.043) 0:04:45.852 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_activate_systemd_unit": true, 
"__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-basic-mysql-volume.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_basic", "__podman_xdg_runtime_dir": "/run/user/1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Wednesday 02 April 2025 12:22:31 -0400 (0:00:00.081) 0:04:45.933 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_basic/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Wednesday 02 April 2025 12:22:31 -0400 (0:00:00.066) 0:04:45.999 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Wednesday 02 April 2025 12:22:31 -0400 (0:00:00.062) 0:04:46.062 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.volume", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Wednesday 02 April 2025 12:22:31 -0400 (0:00:00.145) 0:04:46.208 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Wednesday 02 April 2025 12:22:31 -0400 (0:00:00.083) 0:04:46.292 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Wednesday 02 April 2025 12:22:32 -0400 (0:00:00.161) 0:04:46.454 ******* ok: [managed-node1] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Wednesday 02 April 2025 12:22:32 -0400 (0:00:00.439) 0:04:46.893 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Wednesday 02 April 2025 12:22:32 -0400 (0:00:00.068) 0:04:46.962 ******* ok: [managed-node1] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Parse 
quadlet file] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Wednesday 02 April 2025 12:22:33 -0400 (0:00:00.380) 0:04:47.342 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Wednesday 02 April 2025 12:22:33 -0400 (0:00:00.042) 0:04:47.384 ******* ok: [managed-node1] => { "changed": false, "path": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.volume", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Wednesday 02 April 2025 12:22:33 -0400 (0:00:00.381) 0:04:47.766 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Wednesday 02 April 2025 12:22:33 -0400 (0:00:00.047) 0:04:47.814 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Wednesday 02 April 2025 12:22:33 -0400 (0:00:00.045) 0:04:47.859 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Wednesday 02 April 2025 12:22:33 -0400 (0:00:00.061) 0:04:47.921 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Wednesday 02 April 2025 12:22:33 -0400 (0:00:00.043) 0:04:47.965 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Wednesday 02 April 2025 12:22:33 -0400 (0:00:00.065) 0:04:48.030 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Wednesday 02 April 2025 12:22:33 -0400 (0:00:00.110) 0:04:48.141 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Wednesday 02 April 2025 12:22:33 -0400 (0:00:00.075) 0:04:48.217 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Wednesday 02 April 2025 12:22:34 -0400 (0:00:00.158) 0:04:48.375 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_cancel_user_linger": [ "user_quadlet_basic" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Wednesday 02 April 2025 12:22:34 -0400 (0:00:00.084) 0:04:48.460 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Wednesday 02 April 2025 12:22:34 -0400 (0:00:00.072) 0:04:48.532 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Wednesday 02 April 2025 12:22:34 -0400 (0:00:00.064) 0:04:48.597 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Wednesday 02 April 2025 12:22:34 -0400 (0:00:00.057) 0:04:48.654 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Wednesday 02 April 2025 12:22:34 -0400 (0:00:00.059) 0:04:48.714 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Wednesday 02 April 2025 12:22:34 -0400 (0:00:00.050) 0:04:48.764 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Wednesday 02 April 2025 12:22:34 -0400 (0:00:00.046) 0:04:48.811 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Wednesday 02 April 2025 12:22:34 -0400 (0:00:00.044) 0:04:48.856 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Wednesday 02 April 2025 12:22:34 -0400 (0:00:00.042) 0:04:48.899 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Network": {} }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Wednesday 02 April 2025 12:22:34 -0400 (0:00:00.059) 0:04:48.958 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_basic" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Wednesday 02 April 2025 12:22:34 -0400 (0:00:00.072) 0:04:49.030 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Wednesday 02 April 2025 12:22:34 -0400 (0:00:00.066) 0:04:49.097 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-basic-unused-network", "__podman_quadlet_type": "network", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Wednesday 02 April 2025 12:22:34 -0400 (0:00:00.085) 0:04:49.183 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:22:34 -0400 (0:00:00.122) 0:04:49.306 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:22:35 -0400 (0:00:00.183) 0:04:49.489 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:22:35 -0400 (0:00:00.079) 0:04:49.569 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_group": "1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:22:35 -0400 (0:00:00.085) 0:04:49.654 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610609.2783134, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610533.3987217, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986653, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "4263604762", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:22:35 -0400 (0:00:00.419) 0:04:50.073 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_basic" ], "delta": "0:00:00.003339", "end": "2025-04-02 12:22:36.069675", "rc": 0, "start": "2025-04-02 12:22:36.066336" } STDOUT: 0: user_quadlet_basic 100000 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:22:36 -0400 (0:00:00.441) 0:04:50.515 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_basic" ], "delta": "0:00:00.003224", "end": "2025-04-02 12:22:36.539634", "rc": 0, "start": "2025-04-02 12:22:36.536410" } STDOUT: 0: user_quadlet_basic 100000 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:22:36 -0400 (0:00:00.458) 0:04:50.973 ******* ok: [managed-node1] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_basic": { "range": 65536, "start": 100000 } }, "podman_subuid_info": { "user_quadlet_basic": { "range": 65536, "start": 100000 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:22:36 -0400 (0:00:00.096) 0:04:51.070 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:22:36 -0400 (0:00:00.074) 0:04:51.145 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:22:36 -0400 (0:00:00.067) 0:04:51.213 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:22:36 -0400 (0:00:00.071) 0:04:51.284 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:22:37 -0400 (0:00:00.072) 0:04:51.357 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Wednesday 02 April 2025 12:22:37 -0400 (0:00:00.069) 0:04:51.426 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-basic-unused-network-network.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_basic", "__podman_xdg_runtime_dir": "/run/user/1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Wednesday 02 April 2025 12:22:37 -0400 (0:00:00.115) 0:04:51.541 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_basic/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Wednesday 02 April 2025 12:22:37 -0400 (0:00:00.173) 0:04:51.714 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Wednesday 02 April 2025 12:22:37 -0400 (0:00:00.065) 0:04:51.779 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-network.network", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Wednesday 02 April 2025 12:22:37 -0400 (0:00:00.151) 0:04:51.931 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Wednesday 02 April 2025 12:22:37 -0400 (0:00:00.076) 0:04:52.007 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Wednesday 02 April 2025 12:22:37 -0400 (0:00:00.148) 0:04:52.155 ******* ok: [managed-node1] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Wednesday 02 April 2025 12:22:38 -0400 (0:00:00.417) 0:04:52.573 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Wednesday 02 April 2025 12:22:38 -0400 (0:00:00.045) 0:04:52.619 ******* ok: [managed-node1] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Wednesday 02 April 2025 12:22:38 -0400 (0:00:00.379) 0:04:52.998 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Wednesday 02 April 2025 12:22:38 -0400 (0:00:00.042) 0:04:53.041 ******* ok: [managed-node1] => { "changed": false, "path": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-network.network", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Wednesday 02 April 2025 12:22:39 -0400 (0:00:00.380) 0:04:53.422 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Wednesday 02 April 2025 12:22:39 -0400 (0:00:00.046) 0:04:53.468 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task 
path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Wednesday 02 April 2025 12:22:39 -0400 (0:00:00.044) 0:04:53.512 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Wednesday 02 April 2025 12:22:39 -0400 (0:00:00.130) 0:04:53.643 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Wednesday 02 April 2025 12:22:39 -0400 (0:00:00.050) 0:04:53.694 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Wednesday 02 April 2025 12:22:39 -0400 (0:00:00.046) 0:04:53.740 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Wednesday 02 April 2025 12:22:39 -0400 (0:00:00.073) 0:04:53.814 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Wednesday 02 April 2025 12:22:39 -0400 (0:00:00.044) 0:04:53.858 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Wednesday 02 April 2025 12:22:39 -0400 (0:00:00.043) 0:04:53.901 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_cancel_user_linger": [ "user_quadlet_basic" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Wednesday 02 April 2025 12:22:39 -0400 (0:00:00.049) 0:04:53.950 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Wednesday 02 April 2025 12:22:39 -0400 (0:00:00.044) 0:04:53.995 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Wednesday 02 April 2025 12:22:39 -0400 (0:00:00.044) 0:04:54.039 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Wednesday 02 April 2025 12:22:39 -0400 (0:00:00.045) 0:04:54.085 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Wednesday 02 April 2025 12:22:39 -0400 (0:00:00.046) 0:04:54.131 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Wednesday 02 April 2025 12:22:39 -0400 (0:00:00.043) 0:04:54.174 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Wednesday 02 April 2025 12:22:39 -0400 (0:00:00.045) 0:04:54.219 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Wednesday 02 April 2025 12:22:39 -0400 (0:00:00.050) 0:04:54.270 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Wednesday 02 April 2025 12:22:39 -0400 (0:00:00.065) 0:04:54.335 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Network]\nSubnet=192.168.29.0/24\nGateway=192.168.29.1\nLabel=app=wordpress\nNetworkName=quadlet-basic-name\n", "__podman_quadlet_template_src": "templates/quadlet-basic.network.j2" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Wednesday 02 April 2025 12:22:40 -0400 (0:00:00.271) 0:04:54.606 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_basic" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Wednesday 02 April 2025 12:22:40 -0400 (0:00:00.051) 0:04:54.658 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Wednesday 02 April 2025 12:22:40 -0400 (0:00:00.055) 0:04:54.714 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-basic", "__podman_quadlet_type": "network", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Wednesday 02 April 2025 12:22:40 -0400 (0:00:00.057) 0:04:54.771 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:22:40 -0400 (0:00:00.079) 0:04:54.851 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:22:40 -0400 (0:00:00.050) 0:04:54.902 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:22:40 -0400 (0:00:00.050) 0:04:54.952 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_group": "1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:22:40 -0400 (0:00:00.057) 0:04:55.010 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610609.2783134, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610533.3987217, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986653, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "4263604762", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:22:41 -0400 (0:00:00.414) 0:04:55.424 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_basic" ], "delta": "0:00:00.003335", "end": "2025-04-02 12:22:41.407726", "rc": 0, "start": "2025-04-02 12:22:41.404391" } STDOUT: 0: user_quadlet_basic 100000 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:22:41 -0400 (0:00:00.394) 0:04:55.819 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_basic" ], "delta": "0:00:00.003338", "end": "2025-04-02 12:22:41.810722", "rc": 0, "start": "2025-04-02 12:22:41.807384" } STDOUT: 0: user_quadlet_basic 100000 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:22:41 -0400 (0:00:00.402) 0:04:56.221 ******* ok: [managed-node1] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_basic": { "range": 65536, "start": 100000 } }, "podman_subuid_info": { "user_quadlet_basic": { "range": 65536, "start": 100000 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:22:42 -0400 (0:00:00.138) 0:04:56.360 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:22:42 -0400 (0:00:00.058) 0:04:56.419 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:22:42 -0400 (0:00:00.073) 0:04:56.493 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:22:42 -0400 (0:00:00.070) 0:04:56.563 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:22:42 -0400 (0:00:00.071) 0:04:56.634 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Wednesday 02 
April 2025 12:22:42 -0400 (0:00:00.067) 0:04:56.702 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-basic-network.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_basic", "__podman_xdg_runtime_dir": "/run/user/1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Wednesday 02 April 2025 12:22:42 -0400 (0:00:00.098) 0:04:56.801 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_basic/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Wednesday 02 April 2025 12:22:42 -0400 (0:00:00.056) 0:04:56.857 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Wednesday 02 April 2025 12:22:42 -0400 (0:00:00.044) 0:04:56.901 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic.network", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Wednesday 02 April 2025 12:22:42 -0400 (0:00:00.102) 0:04:57.003 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Wednesday 02 April 2025 12:22:42 -0400 (0:00:00.048) 0:04:57.052 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Wednesday 02 April 2025 12:22:42 -0400 (0:00:00.121) 0:04:57.174 ******* ok: [managed-node1] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Wednesday 02 April 2025 12:22:43 -0400 (0:00:00.391) 0:04:57.565 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Wednesday 02 April 2025 12:22:43 -0400 (0:00:00.117) 0:04:57.682 
******* ok: [managed-node1] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Wednesday 02 April 2025 12:22:43 -0400 (0:00:00.427) 0:04:58.110 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Wednesday 02 April 2025 12:22:43 -0400 (0:00:00.066) 0:04:58.176 ******* ok: [managed-node1] => { "changed": false, "path": "/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic.network", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Wednesday 02 April 2025 12:22:44 -0400 (0:00:00.443) 0:04:58.620 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Wednesday 02 April 2025 12:22:44 -0400 (0:00:00.071) 0:04:58.691 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Wednesday 02 April 2025 12:22:44 -0400 (0:00:00.071) 0:04:58.763 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Wednesday 02 April 2025 12:22:44 -0400 (0:00:00.104) 0:04:58.868 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Wednesday 02 April 2025 12:22:44 -0400 (0:00:00.063) 0:04:58.932 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Wednesday 02 April 2025 12:22:44 -0400 (0:00:00.070) 0:04:59.002 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Wednesday 02 April 2025 12:22:44 -0400 (0:00:00.114) 
0:04:59.117 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Wednesday 02 April 2025 12:22:44 -0400 (0:00:00.064) 0:04:59.182 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Wednesday 02 April 2025 12:22:44 -0400 (0:00:00.068) 0:04:59.250 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_cancel_user_linger": [ "user_quadlet_basic" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Wednesday 02 April 2025 12:22:44 -0400 (0:00:00.074) 0:04:59.324 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Wednesday 02 April 2025 12:22:45 -0400 (0:00:00.065) 0:04:59.390 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Wednesday 02 April 2025 12:22:45 -0400 (0:00:00.156) 0:04:59.546 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Wednesday 02 April 2025 12:22:45 -0400 (0:00:00.068) 0:04:59.615 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Wednesday 02 April 2025 12:22:45 -0400 (0:00:00.068) 0:04:59.684 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Wednesday 02 April 2025 12:22:45 -0400 (0:00:00.070) 0:04:59.755 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Wednesday 02 April 2025 12:22:45 -0400 (0:00:00.074) 
0:04:59.829 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Wednesday 02 April 2025 12:22:45 -0400 (0:00:00.080) 0:04:59.910 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:198 Wednesday 02 April 2025 12:22:45 -0400 (0:00:00.066) 0:04:59.976 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:4 Wednesday 02 April 2025 12:22:45 -0400 (0:00:00.157) 0:05:00.134 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set cancel linger vars] *************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:11 Wednesday 02 April 2025 12:22:45 -0400 (0:00:00.048) 0:05:00.182 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_xdg_runtime_dir": "/run/user/1111" }, "changed": false } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:16 Wednesday 02 April 2025 12:22:45 -0400 (0:00:00.055) 0:05:00.238 ******* ok: [managed-node1] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Gather facts for containers] ********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:21 Wednesday 02 April 2025 12:22:46 -0400 (0:00:00.418) 0:05:00.656 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Gather facts for networks] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:30 Wednesday 02 April 2025 12:22:46 -0400 (0:00:00.064) 0:05:00.721 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Gather secrets] *********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:40 Wednesday 02 April 2025 12:22:46 -0400 (0:00:00.067) 0:05:00.789 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger if no more resources are in use] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:50 Wednesday 02 April 2025 12:22:46 -0400 (0:00:00.167) 0:05:00.957 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Wait for user session to exit closing 
state] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:62 Wednesday 02 April 2025 12:22:46 -0400 (0:00:00.068) 0:05:01.025 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop logind] ************************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:82 Wednesday 02 April 2025 12:22:46 -0400 (0:00:00.067) 0:05:01.092 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Wait for user session to exit closing state] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:87 Wednesday 02 April 2025 12:22:46 -0400 (0:00:00.067) 0:05:01.160 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Restart logind] *********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cancel_linger.yml:98 Wednesday 02 April 2025 12:22:46 -0400 (0:00:00.065) 0:05:01.226 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:204 Wednesday 02 April 2025 12:22:46 -0400 (0:00:00.066) 0:05:01.292 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:213 Wednesday 02 April 2025 12:22:47 -0400 (0:00:00.059) 0:05:01.352 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Remove test user] ******************************************************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:309 Wednesday 02 April 2025 12:22:47 -0400 (0:00:00.061) 0:05:01.413 ******* changed: [managed-node1] => { "changed": true, "force": false, "name": "user_quadlet_basic", "remove": false, "state": "absent" } TASK [Cleanup system - root] *************************************************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:315 Wednesday 02 April 2025 12:22:47 -0400 (0:00:00.548) 0:05:01.962 ******* TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Wednesday 02 April 2025 12:22:47 -0400 (0:00:00.140) 0:05:02.103 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Wednesday 02 April 2025 12:22:47 
-0400 (0:00:00.114) 0:05:02.217 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Wednesday 02 April 2025 12:22:47 -0400 (0:00:00.081) 0:05:02.298 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Wednesday 02 April 2025 12:22:48 -0400 (0:00:00.175) 0:05:02.473 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Wednesday 02 April 2025 12:22:48 -0400 (0:00:00.068) 0:05:02.542 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Wednesday 02 April 2025 12:22:48 -0400 (0:00:00.067) 0:05:02.609 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Wednesday 02 April 2025 12:22:48 -0400 (0:00:00.069) 0:05:02.679 ******* ok: [managed-node1] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node1] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node1] => (item=CentOS_8.yml) => { "ansible_facts": { "__podman_packages": [ "crun", "podman", "podman-plugins", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } ok: [managed-node1] => (item=CentOS_8.yml) => { "ansible_facts": { "__podman_packages": [ "crun", "podman", "podman-plugins", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_8.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_8.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Wednesday 02 April 2025 12:22:48 -0400 (0:00:00.156) 0:05:02.835 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : 
Enable copr if requested] ************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Wednesday 02 April 2025 12:22:50 -0400 (0:00:01.580) 0:05:04.416 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Wednesday 02 April 2025 12:22:50 -0400 (0:00:00.068) 0:05:04.484 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Wednesday 02 April 2025 12:22:50 -0400 (0:00:00.064) 0:05:04.549 ******* skipping: [managed-node1] => {} TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Wednesday 02 April 2025 12:22:50 -0400 (0:00:00.051) 0:05:04.600 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Wednesday 02 April 2025 12:22:50 -0400 (0:00:00.052) 0:05:04.652 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Wednesday 02 April 2025 12:22:50 -0400 (0:00:00.104) 0:05:04.757 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.027827", "end": "2025-04-02 12:22:50.764523", "rc": 0, "start": "2025-04-02 12:22:50.736696" } STDOUT: podman version 4.9.4-dev TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Wednesday 02 April 2025 12:22:50 -0400 (0:00:00.448) 0:05:05.205 ******* ok: [managed-node1] => { "ansible_facts": { "podman_version": "4.9.4-dev" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Wednesday 02 April 2025 12:22:50 -0400 (0:00:00.067) 0:05:05.273 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Wednesday 02 April 2025 12:22:51 -0400 (0:00:00.066) 0:05:05.339 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } META: end_host conditional evaluated to false, continuing execution for managed-node1 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Wednesday 02 April 2025 12:22:51 -0400 (0:00:00.217) 0:05:05.556 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } META: end_host conditional evaluated to false, continuing execution for managed-node1 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Wednesday 02 April 2025 12:22:51 -0400 (0:00:00.227) 0:05:05.784 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:22:51 -0400 (0:00:00.131) 0:05:05.915 ******* ok: [managed-node1] => { "ansible_facts": { "getent_passwd": { "root": [ "x", "0", "0", "root", "/root", "/bin/bash" ] } }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:22:52 -0400 (0:00:00.430) 0:05:06.346 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:22:52 -0400 (0:00:00.051) 0:05:06.397 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:22:52 -0400 (0:00:00.060) 0:05:06.458 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610609.2783134, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610533.3987217, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986653, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "4263604762", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:22:52 -0400 (0:00:00.378) 0:05:06.836 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:22:52 -0400 (0:00:00.043) 0:05:06.880 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:22:52 -0400 (0:00:00.045) 0:05:06.925 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:22:52 -0400 (0:00:00.042) 0:05:06.968 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:22:52 -0400 (0:00:00.043) 0:05:07.012 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:22:52 -0400 (0:00:00.044) 0:05:07.056 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:22:52 -0400 (0:00:00.042) 0:05:07.098 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:22:52 -0400 (0:00:00.053) 0:05:07.152 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Wednesday 02 April 2025 12:22:52 -0400 (0:00:00.046) 0:05:07.198 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_parent_mode": "0755", "__podman_parent_path": "/etc/containers", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:126 Wednesday 02 April 2025 12:22:52 -0400 (0:00:00.057) 0:05:07.256 ******* included: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Wednesday 02 April 2025 12:22:53 -0400 (0:00:00.086) 0:05:07.342 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Wednesday 02 April 2025 12:22:53 -0400 (0:00:00.069) 0:05:07.412 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:129 Wednesday 02 April 2025 12:22:53 -0400 (0:00:00.046) 0:05:07.458 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Wednesday 02 April 2025 12:22:53 -0400 (0:00:00.182) 0:05:07.641 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Wednesday 02 April 2025 12:22:53 -0400 (0:00:00.053) 0:05:07.694 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:132 Wednesday 02 April 2025 12:22:53 -0400 (0:00:00.042) 0:05:07.737 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:7 Wednesday 02 April 2025 12:22:53 -0400 (0:00:00.124) 0:05:07.861 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:15 Wednesday 02 April 2025 12:22:53 -0400 (0:00:00.064) 0:05:07.925 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:135 Wednesday 02 April 2025 12:22:53 -0400 (0:00:00.053) 0:05:07.979 ******* included: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:8 Wednesday 02 April 2025 12:22:53 -0400 (0:00:00.108) 0:05:08.087 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:16 Wednesday 02 April 2025 12:22:53 -0400 (0:00:00.046) 0:05:08.134 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:21 Wednesday 02 April 2025 12:22:53 -0400 (0:00:00.050) 0:05:08.185 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:27 Wednesday 02 April 2025 12:22:53 -0400 (0:00:00.041) 0:05:08.226 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:141 Wednesday 02 April 2025 12:22:53 -0400 (0:00:00.039) 0:05:08.266 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:148 Wednesday 02 April 2025 12:22:53 -0400 (0:00:00.048) 0:05:08.314 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:155 Wednesday 02 April 2025 12:22:54 -0400 (0:00:00.066) 0:05:08.380 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:159 Wednesday 02 April 2025 12:22:54 -0400 (0:00:00.062) 0:05:08.442 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:168 Wednesday 02 April 2025 12:22:54 -0400 (0:00:00.144) 0:05:08.587 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", 
"changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:177 Wednesday 02 April 2025 12:22:54 -0400 (0:00:00.046) 0:05:08.634 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node1 included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Wednesday 02 April 2025 12:22:54 -0400 (0:00:00.156) 0:05:08.790 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Wednesday 02 April 2025 12:22:54 -0400 (0:00:00.044) 0:05:08.835 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:22:54 -0400 (0:00:00.075) 0:05:08.910 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:22:54 -0400 (0:00:00.064) 0:05:08.975 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:22:54 -0400 (0:00:00.074) 0:05:09.049 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:22:54 -0400 (0:00:00.066) 0:05:09.116 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:22:54 -0400 (0:00:00.053) 0:05:09.169 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:22:54 -0400 (0:00:00.059) 0:05:09.229 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:22:54 -0400 (0:00:00.039) 0:05:09.268 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:22:54 -0400 (0:00:00.042) 0:05:09.311 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:22:55 -0400 (0:00:00.041) 0:05:09.353 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:22:55 -0400 (0:00:00.039) 0:05:09.392 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:22:55 -0400 (0:00:00.108) 0:05:09.501 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:22:55 -0400 (0:00:00.041) 0:05:09.543 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14 Wednesday 02 April 2025 12:22:55 -0400 (0:00:00.040) 0:05:09.583 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_rootless": false, "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20 Wednesday 02 April 2025 12:22:55 -0400 (0:00:00.060) 0:05:09.643 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Wednesday 02 April 2025 12:22:55 -0400 (0:00:00.123) 0:05:09.767 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Wednesday 02 April 2025 12:22:55 -0400 (0:00:00.067) 0:05:09.835 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Wednesday 02 April 2025 12:22:55 -0400 (0:00:00.067) 0:05:09.902 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25 Wednesday 02 April 2025 12:22:55 -0400 (0:00:00.053) 0:05:09.956 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41 Wednesday 02 April 2025 12:22:55 -0400 (0:00:00.046) 0:05:10.002 ******* [WARNING]: Using a variable for a task's 'args' is unsafe in some situations (see https://docs.ansible.com/ansible/devel/reference_appendices/faq.html#argsplat-unsafe) changed: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Wednesday 02 April 2025 12:22:56 -0400 (0:00:00.430) 0:05:10.432 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Wednesday 02 April 2025 12:22:56 -0400 (0:00:00.045) 0:05:10.478 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:22:56 -0400 (0:00:00.103) 0:05:10.582 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:22:56 -0400 (0:00:00.117) 0:05:10.699 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:22:56 -0400 (0:00:00.049) 0:05:10.748 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids
exists] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:22:56 -0400 (0:00:00.058) 0:05:10.807 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:22:56 -0400 (0:00:00.042) 0:05:10.849 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:22:56 -0400 (0:00:00.040) 0:05:10.889 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:22:56 -0400 (0:00:00.051) 0:05:10.941 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:22:56 -0400 (0:00:00.065) 0:05:11.007 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:22:56 -0400 (0:00:00.062) 0:05:11.070 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:22:56 -0400 (0:00:00.064) 0:05:11.135 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:22:56 -0400 (0:00:00.065) 0:05:11.200 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:22:56 -0400 (0:00:00.064) 0:05:11.265 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14 Wednesday 02 April 2025 12:22:56 -0400 
(0:00:00.066) 0:05:11.331 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_rootless": false, "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20 Wednesday 02 April 2025 12:22:57 -0400 (0:00:00.063) 0:05:11.395 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Wednesday 02 April 2025 12:22:57 -0400 (0:00:00.092) 0:05:11.488 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Wednesday 02 April 2025 12:22:57 -0400 (0:00:00.040) 0:05:11.528 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Wednesday 02 April 2025 12:22:57 -0400 (0:00:00.040) 0:05:11.569 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25 Wednesday 02 April 2025 12:22:57 -0400 (0:00:00.117) 0:05:11.686 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41 Wednesday 02 April 2025 12:22:57 -0400 (0:00:00.056) 0:05:11.742 ******* [WARNING]: Using a variable for a task's 'args' is unsafe in some situations (see https://docs.ansible.com/ansible/devel/reference_appendices/faq.html#argsplat-unsafe) changed: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:184 Wednesday 02 April 2025 12:22:57 -0400 (0:00:00.476) 0:05:12.219 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:191 Wednesday 02 April 2025 12:22:57 -0400 (0:00:00.049) 0:05:12.269 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 included:
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Wednesday 02 April 2025 12:22:58 -0400 (0:00:00.194) 0:05:12.463 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Container": { "ContainerName": "quadlet-basic-mysql-name", "Environment": [ "FOO=/bin/busybox-extras", "BAZ=test" ], "Image": "quay.io/linux-system-roles/mysql:5.6", "Network": "quadlet-basic.network", "PodmanArgs": "--secret=mysql_container_root_password,type=env,target=MYSQL_ROOT_PASSWORD --secret=json_secret,type=mount,target=/tmp/test.json", "Volume": "quadlet-basic-mysql.volume:/var/lib/mysql" }, "Install": { "WantedBy": "default.target" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Wednesday 02 April 2025 12:22:58 -0400 (0:00:00.051) 0:05:12.514 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Wednesday 02 April 2025 12:22:58 -0400 (0:00:00.050) 0:05:12.565 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Wednesday 02 April 2025 12:22:58 -0400 (0:00:00.041) 0:05:12.606 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-basic-mysql", "__podman_quadlet_type": "container", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Wednesday 02 April 2025 12:22:58 -0400 (0:00:00.071) 0:05:12.678 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:22:58 -0400 (0:00:00.107) 0:05:12.785 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": 
"Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:22:58 -0400 (0:00:00.055) 0:05:12.841 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:22:58 -0400 (0:00:00.133) 0:05:12.974 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:22:58 -0400 (0:00:00.057) 0:05:13.031 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610609.2783134, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610533.3987217, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986653, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "4263604762", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:22:59 -0400 (0:00:00.389) 0:05:13.420 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:22:59 -0400 (0:00:00.044) 0:05:13.465 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:22:59 -0400 (0:00:00.042) 0:05:13.508 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:22:59 -0400 (0:00:00.051) 0:05:13.559 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:22:59 -0400 (0:00:00.060) 0:05:13.620 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:22:59 -0400 (0:00:00.069) 0:05:13.690 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:22:59 -0400 (0:00:00.061) 0:05:13.751 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:22:59 -0400 (0:00:00.055) 0:05:13.806 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Wednesday 02 April 2025 12:22:59 -0400 (0:00:00.052) 0:05:13.859 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [ "quay.io/linux-system-roles/mysql:5.6" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-basic-mysql.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Wednesday 02 April 2025 12:22:59 -0400 (0:00:00.082) 0:05:13.941 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Wednesday 02 April 2025 12:22:59 -0400 (0:00:00.044) 0:05:13.986 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Wednesday 02 April 2025 12:22:59 -0400 (0:00:00.039) 0:05:14.025 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_images": [ "quay.io/linux-system-roles/mysql:5.6" ], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-basic-mysql.container", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Wednesday 02 
April 2025 12:22:59 -0400 (0:00:00.102) 0:05:14.128 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Wednesday 02 April 2025 12:22:59 -0400 (0:00:00.153) 0:05:14.281 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Wednesday 02 April 2025 12:23:00 -0400 (0:00:00.159) 0:05:14.440 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Wednesday 02 April 2025 12:23:00 -0400 (0:00:00.047) 0:05:14.488 ******* changed: [managed-node1] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-basic-mysql.service", "state": "stopped", "status": { "ActiveEnterTimestamp": "Wed 2025-04-02 12:22:07 EDT", "ActiveEnterTimestampMonotonic": "197713689", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "quadlet-basic-network.service sysinit.target system.slice -.mount basic.target systemd-journald.socket quadlet-basic-mysql-volume.service", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "yes", "AssertTimestamp": "Wed 2025-04-02 12:22:07 EDT", "AssertTimestampMonotonic": "197304330", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Wed 2025-04-02 12:22:07 EDT", "ConditionTimestampMonotonic": "197304328", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/system.slice/quadlet-basic-mysql.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "yes", "DelegateControllers": "cpu cpuacct cpuset io blkio memory devices 
pids", "Description": "quadlet-basic-mysql.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveCPUs": "0-1", "EffectiveMemoryNodes": "0", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-basic-mysql.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "29030", "ExecMainStartTimestamp": "Wed 2025-04-02 12:22:07 EDT", "ExecMainStartTimestampMonotonic": "197713669", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name=quadlet-basic-mysql-name --cidfile=/run/quadlet-basic-mysql.cid --replace --rm --cgroups=split --network=quadlet-basic-name --sdnotify=conmon -d -v quadlet-basic-mysql-name:/var/lib/mysql --env BAZ=test --env FOO=/bin/busybox-extras --secret=mysql_container_root_password,type=env,target=MYSQL_ROOT_PASSWORD --secret=json_secret,type=mount,target=/tmp/test.json quay.io/linux-system-roles/mysql:5.6 ; ignore_errors=no ; start_time=[Wed 2025-04-02 12:22:07 EDT] ; stop_time=[n/a] ; pid=28849 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-basic-mysql.cid ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-basic-mysql.cid ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/run/systemd/generator/quadlet-basic-mysql.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "quadlet-basic-mysql.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Wed 2025-04-02 12:22:07 EDT", "InactiveExitTimestampMonotonic": "197309303", "InvocationID": "d30f150e51f149e68491ee839a41fcad", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "29030", "MemoryAccounting": "yes", "MemoryCurrent": "602021888", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", 
"MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "quadlet-basic-mysql.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "quadlet-basic-mysql-volume.service sysinit.target -.mount quadlet-basic-network.service system.slice", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-basic-mysql.container", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Wed 2025-04-02 12:22:07 EDT", "StateChangeTimestampMonotonic": "197713689", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-basic-mysql", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "23", "TasksMax": "22405", "TimeoutStartUSec": "1min 30s", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WatchdogTimestamp": "Wed 2025-04-02 12:22:07 EDT", "WatchdogTimestampMonotonic": "197713686", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Wednesday 02 April 2025 12:23:02 -0400 (0:00:02.823) 0:05:17.311 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610926.6999424, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "0b6cac7929623f1059e78ef39b8b0a25169b28a6", "ctime": 1743610926.1489372, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 442499230, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1743610925.8839347, "nlink": 1, "path": 
"/etc/containers/systemd/quadlet-basic-mysql.container", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 448, "uid": 0, "version": "4258272651", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Wednesday 02 April 2025 12:23:03 -0400 (0:00:00.464) 0:05:17.775 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Wednesday 02 April 2025 12:23:03 -0400 (0:00:00.197) 0:05:17.973 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Wednesday 02 April 2025 12:23:04 -0400 (0:00:00.386) 0:05:18.360 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Wednesday 02 April 2025 12:23:04 -0400 (0:00:00.075) 0:05:18.435 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Wednesday 02 April 2025 12:23:04 -0400 (0:00:00.064) 0:05:18.501 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Wednesday 02 April 2025 12:23:04 -0400 (0:00:00.080) 0:05:18.581 ******* changed: [managed-node1] => { "changed": true, "path": "/etc/containers/systemd/quadlet-basic-mysql.container", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Wednesday 02 April 2025 12:23:04 -0400 (0:00:00.420) 0:05:19.002 ******* ok: [managed-node1] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Wednesday 02 April 2025 12:23:05 -0400 (0:00:00.704) 0:05:19.707 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", 
"changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Wednesday 02 April 2025 12:23:05 -0400 (0:00:00.453) 0:05:20.160 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Wednesday 02 April 2025 12:23:05 -0400 (0:00:00.057) 0:05:20.218 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Wednesday 02 April 2025 12:23:06 -0400 (0:00:00.135) 0:05:20.354 ******* changed: [managed-node1] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.246443", "end": "2025-04-02 12:23:06.589279", "rc": 0, "start": "2025-04-02 12:23:06.342836" } STDOUT: dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Wednesday 02 April 2025 12:23:06 -0400 (0:00:00.656) 0:05:21.010 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Wednesday 02 April 2025 12:23:06 -0400 (0:00:00.073) 0:05:21.084 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Wednesday 02 April 2025 12:23:06 -0400 (0:00:00.042) 0:05:21.126 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Wednesday 02 April 2025 12:23:06 -0400 (0:00:00.040) 0:05:21.167 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Wednesday 02 April 2025 12:23:06 -0400 (0:00:00.048) 0:05:21.215 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.036788", "end": "2025-04-02 12:23:07.250238", "rc": 0, "start": "2025-04-02 12:23:07.213450" } STDOUT: quay.io/libpod/registry 2.8.2 0030ba3d620c 20 months ago 24.6 MB TASK [fedora.linux_system_roles.podman : For testing and debugging - 
volumes] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Wednesday 02 April 2025 12:23:07 -0400 (0:00:00.453) 0:05:21.669 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.034778", "end": "2025-04-02 12:23:07.710222", "rc": 0, "start": "2025-04-02 12:23:07.675444" } STDOUT: local c1fc31f785346f7a5c9afe68ea235eba6fe181647a5c2fb992f846acb33ed7e7 local quadlet-basic-mysql-name local systemd-quadlet-basic-unused-volume TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Wednesday 02 April 2025 12:23:07 -0400 (0:00:00.454) 0:05:22.123 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.039732", "end": "2025-04-02 12:23:08.161218", "rc": 0, "start": "2025-04-02 12:23:08.121486" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Wednesday 02 April 2025 12:23:08 -0400 (0:00:00.471) 0:05:22.595 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.062617", "end": "2025-04-02 12:23:08.637534", "rc": 0, "start": "2025-04-02 12:23:08.574917" } STDOUT: podman quadlet-basic-name systemd-quadlet-basic-unused-network TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Wednesday 02 April 2025 12:23:08 -0400 (0:00:00.535) 0:05:23.130 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Wednesday 02 April 2025 12:23:09 -0400 (0:00:00.420) 0:05:23.551 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Wednesday 02 April 2025 12:23:09 -0400 (0:00:00.426) 0:05:23.977 ******* ok: [managed-node1] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": 
"unknown", "status": "enabled" }, "certmonger.service": { "name": "certmonger.service", "source": "systemd", "state": "running", "status": "enabled" }, "chrony-dnssrv@.service": { "name": "chrony-dnssrv@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cni-dhcp.service": { "name": "cni-dhcp.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "static" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus-org.freedesktop.portable1.service": { "name": "dbus-org.freedesktop.portable1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "running", "status": "static" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dnsmasq.service": { "name": "dnsmasq.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "unknown" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "halt-local.service": { "name": "halt-local.service", "source": "systemd", "state": "inactive", "status": "static" }, "import-state.service": { "name": "import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "iprdump.service": { "name": "iprdump.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprinit.service": { "name": "iprinit.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprupdate.service": { "name": "iprupdate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": 
"inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "loadmodules.service": { "name": "loadmodules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "messagebus.service": { "name": "messagebus.service", "source": "systemd", "state": "active", "status": "static" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-convert.service": { "name": "nfs-convert.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-halt.service": { "name": "plymouth-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-kexec.service": { "name": "plymouth-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-poweroff.service": { "name": "plymouth-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-quit.service": { "name": "plymouth-quit.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-read-write.service": { "name": "plymouth-read-write.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-reboot.service": { "name": "plymouth-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-switch-root-initramfs.service": { "name": "plymouth-switch-root-initramfs.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-switch-root.service": { "name": "plymouth-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": 
"disabled" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quadlet-basic-mysql-volume.service": { "name": "quadlet-basic-mysql-volume.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quadlet-basic-network.service": { "name": "quadlet-basic-network.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quadlet-basic-unused-network-network.service": { "name": "quadlet-basic-unused-network-network.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quadlet-basic-unused-volume-volume.service": { "name": "quadlet-basic-unused-volume-volume.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { 
"name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "active", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-plymouth.service": { "name": "systemd-ask-password-plymouth.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-portabled.service": { "name": "systemd-portabled.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-resolved.service": { "name": "systemd-resolved.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "masked" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-udev-trigger.service": { "name": 
"systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "tcsd.service": { "name": "tcsd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "timedatex.service": { "name": "timedatex.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "tuned.service": { "name": "tuned.service", "source": "systemd", "state": "running", "status": "enabled" }, "unbound-anchor.service": { "name": "unbound-anchor.service", "source": "systemd", "state": "stopped", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "unknown" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Wednesday 02 April 2025 12:23:11 -0400 (0:00:01.651) 0:05:25.628 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Wednesday 02 April 2025 12:23:11 -0400 (0:00:00.050) 0:05:25.679 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Volume": {} }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Wednesday 02 April 2025 12:23:11 -0400 (0:00:00.052) 0:05:25.732 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet 
spec is given] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Wednesday 02 April 2025 12:23:11 -0400 (0:00:00.051) 0:05:25.783 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Wednesday 02 April 2025 12:23:11 -0400 (0:00:00.110) 0:05:25.894 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-basic-unused-volume", "__podman_quadlet_type": "volume", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Wednesday 02 April 2025 12:23:11 -0400 (0:00:00.059) 0:05:25.953 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:23:11 -0400 (0:00:00.077) 0:05:26.031 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:23:11 -0400 (0:00:00.054) 0:05:26.085 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:23:11 -0400 (0:00:00.051) 0:05:26.137 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:23:11 -0400 (0:00:00.059) 0:05:26.197 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610609.2783134, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610533.3987217, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986653, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "4263604762", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:23:12 -0400 (0:00:00.387) 0:05:26.585 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:23:12 -0400 (0:00:00.053) 0:05:26.638 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:23:12 -0400 (0:00:00.063) 0:05:26.702 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:23:12 -0400 (0:00:00.074) 0:05:26.777 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:23:12 -0400 (0:00:00.069) 0:05:26.846 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:23:12 -0400 (0:00:00.066) 0:05:26.913 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:23:12 -0400 (0:00:00.069) 0:05:26.982 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:23:12 -0400 (0:00:00.168) 0:05:27.150 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Wednesday 02 April 2025 12:23:12 -0400 (0:00:00.066) 0:05:27.217 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-basic-unused-volume-volume.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK 
[fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Wednesday 02 April 2025 12:23:12 -0400 (0:00:00.119) 0:05:27.336 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Wednesday 02 April 2025 12:23:13 -0400 (0:00:00.075) 0:05:27.412 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Wednesday 02 April 2025 12:23:13 -0400 (0:00:00.064) 0:05:27.477 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-basic-unused-volume.volume", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Wednesday 02 April 2025 12:23:13 -0400 (0:00:00.150) 0:05:27.627 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Wednesday 02 April 2025 12:23:13 -0400 (0:00:00.061) 0:05:27.688 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Wednesday 02 April 2025 12:23:13 -0400 (0:00:00.122) 0:05:27.810 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Wednesday 02 April 2025 12:23:13 -0400 (0:00:00.042) 0:05:27.853 ******* changed: [managed-node1] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-basic-unused-volume-volume.service", "state": "stopped", "status": { "ActiveEnterTimestamp": "Wed 2025-04-02 12:21:53 EDT", "ActiveEnterTimestampMonotonic": "183948529", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "-.mount systemd-journald.socket sysinit.target basic.target system.slice", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "yes", "AssertTimestamp": "Wed 2025-04-02 12:21:53 EDT", "AssertTimestampMonotonic": "183902054", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", 
"CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Wed 2025-04-02 12:21:53 EDT", "ConditionTimestampMonotonic": "183902053", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/system.slice/quadlet-basic-unused-volume-volume.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "quadlet-basic-unused-volume-volume.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "ExecMainCode": "1", "ExecMainExitTimestamp": "Wed 2025-04-02 12:21:53 EDT", "ExecMainExitTimestampMonotonic": "183948375", "ExecMainPID": "27874", "ExecMainStartTimestamp": "Wed 2025-04-02 12:21:53 EDT", "ExecMainStartTimestampMonotonic": "183902845", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-basic-unused-volume ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/run/systemd/generator/quadlet-basic-unused-volume-volume.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "quadlet-basic-unused-volume-volume.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Wed 2025-04-02 12:21:53 EDT", "InactiveExitTimestampMonotonic": "183902889", "InvocationID": "b55f0c65672d4d68813fe64e4795c46f", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": 
"14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "MemoryAccounting": "yes", "MemoryCurrent": "0", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "quadlet-basic-unused-volume-volume.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "-.mount system.slice sysinit.target", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Wed 2025-04-02 12:21:53 EDT", "StateChangeTimestampMonotonic": "183948529", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "exited", "SuccessAction": "none", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-basic-unused-volume-volume", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "0", "TasksMax": "22405", "TimeoutStartUSec": "infinity", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Wednesday 02 April 2025 12:23:14 -0400 (0:00:00.720) 0:05:28.573 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610913.2458165, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": 
"fd0ae560360afa5541b866560b1e849d25e216ef", "ctime": 1743610912.6678112, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 285212866, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1743610912.3728082, "nlink": 1, "path": "/etc/containers/systemd/quadlet-basic-unused-volume.volume", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 53, "uid": 0, "version": "2616880427", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Wednesday 02 April 2025 12:23:14 -0400 (0:00:00.460) 0:05:29.034 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Wednesday 02 April 2025 12:23:14 -0400 (0:00:00.172) 0:05:29.206 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Wednesday 02 April 2025 12:23:15 -0400 (0:00:00.405) 0:05:29.611 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Wednesday 02 April 2025 12:23:15 -0400 (0:00:00.067) 0:05:29.679 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Wednesday 02 April 2025 12:23:15 -0400 (0:00:00.042) 0:05:29.721 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Wednesday 02 April 2025 12:23:15 -0400 (0:00:00.040) 0:05:29.762 ******* changed: [managed-node1] => { "changed": true, "path": "/etc/containers/systemd/quadlet-basic-unused-volume.volume", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Wednesday 02 April 2025 12:23:15 -0400 (0:00:00.435) 0:05:30.198 ******* ok: [managed-node1] => { "changed": false, "name": 
null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Wednesday 02 April 2025 12:23:16 -0400 (0:00:00.632) 0:05:30.830 ******* changed: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Wednesday 02 April 2025 12:23:16 -0400 (0:00:00.466) 0:05:31.297 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Wednesday 02 April 2025 12:23:17 -0400 (0:00:00.071) 0:05:31.369 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Wednesday 02 April 2025 12:23:17 -0400 (0:00:00.051) 0:05:31.420 ******* changed: [managed-node1] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.035234", "end": "2025-04-02 12:23:17.427329", "rc": 0, "start": "2025-04-02 12:23:17.392095" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Wednesday 02 April 2025 12:23:17 -0400 (0:00:00.500) 0:05:31.921 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Wednesday 02 April 2025 12:23:17 -0400 (0:00:00.105) 0:05:32.026 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Wednesday 02 April 2025 12:23:17 -0400 (0:00:00.048) 0:05:32.075 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Wednesday 02 April 2025 12:23:17 -0400 (0:00:00.050) 0:05:32.125 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Wednesday 02 April 2025 12:23:17 -0400 
(0:00:00.046) 0:05:32.172 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.038356", "end": "2025-04-02 12:23:18.177418", "rc": 0, "start": "2025-04-02 12:23:18.139062" } STDOUT: quay.io/libpod/registry 2.8.2 0030ba3d620c 20 months ago 24.6 MB TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Wednesday 02 April 2025 12:23:18 -0400 (0:00:00.422) 0:05:32.595 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.032621", "end": "2025-04-02 12:23:18.604063", "rc": 0, "start": "2025-04-02 12:23:18.571442" } STDOUT: local c1fc31f785346f7a5c9afe68ea235eba6fe181647a5c2fb992f846acb33ed7e7 local quadlet-basic-mysql-name TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Wednesday 02 April 2025 12:23:18 -0400 (0:00:00.432) 0:05:33.027 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.043377", "end": "2025-04-02 12:23:19.040259", "rc": 0, "start": "2025-04-02 12:23:18.996882" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Wednesday 02 April 2025 12:23:19 -0400 (0:00:00.434) 0:05:33.461 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.060474", "end": "2025-04-02 12:23:19.494200", "rc": 0, "start": "2025-04-02 12:23:19.433726" } STDOUT: podman quadlet-basic-name systemd-quadlet-basic-unused-network TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Wednesday 02 April 2025 12:23:19 -0400 (0:00:00.455) 0:05:33.916 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Wednesday 02 April 2025 12:23:20 -0400 (0:00:00.425) 0:05:34.341 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Wednesday 02 April 2025 12:23:20 -0400 (0:00:00.430) 0:05:34.772 ******* ok: [managed-node1] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": 
"enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "certmonger.service": { "name": "certmonger.service", "source": "systemd", "state": "running", "status": "enabled" }, "chrony-dnssrv@.service": { "name": "chrony-dnssrv@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cni-dhcp.service": { "name": "cni-dhcp.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "static" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus-org.freedesktop.portable1.service": { "name": "dbus-org.freedesktop.portable1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "running", "status": "static" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": 
"dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dnsmasq.service": { "name": "dnsmasq.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "unknown" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "halt-local.service": { "name": "halt-local.service", "source": "systemd", "state": "inactive", "status": "static" }, "import-state.service": { "name": "import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "iprdump.service": { "name": "iprdump.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprinit.service": { "name": "iprinit.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprupdate.service": { "name": "iprupdate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": 
"enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "loadmodules.service": { "name": "loadmodules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "messagebus.service": { "name": "messagebus.service", "source": "systemd", "state": "active", "status": "static" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-convert.service": { "name": "nfs-convert.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-halt.service": { "name": "plymouth-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-kexec.service": { "name": "plymouth-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-poweroff.service": { "name": "plymouth-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-quit.service": { "name": "plymouth-quit.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-read-write.service": { "name": "plymouth-read-write.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-reboot.service": { "name": "plymouth-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-switch-root-initramfs.service": { "name": "plymouth-switch-root-initramfs.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-switch-root.service": { "name": 
"plymouth-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quadlet-basic-mysql-volume.service": { "name": "quadlet-basic-mysql-volume.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quadlet-basic-network.service": { "name": "quadlet-basic-network.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quadlet-basic-unused-network-network.service": { "name": "quadlet-basic-unused-network-network.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", 
"source": "systemd", "state": "stopped", "status": "unknown" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "active", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-plymouth.service": { "name": "systemd-ask-password-plymouth.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": 
"systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-portabled.service": { "name": "systemd-portabled.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-resolved.service": { "name": "systemd-resolved.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "masked" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "tcsd.service": { "name": "tcsd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "timedatex.service": { "name": "timedatex.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "tuned.service": { "name": "tuned.service", "source": "systemd", "state": "running", "status": "enabled" }, "unbound-anchor.service": { "name": "unbound-anchor.service", "source": "systemd", "state": "stopped", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "unknown" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Wednesday 02 April 2025 12:23:22 -0400 (0:00:01.660) 0:05:36.432 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Wednesday 02 April 2025 12:23:22 -0400 (0:00:00.066) 0:05:36.499 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Volume": { "VolumeName": "quadlet-basic-mysql-name" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Wednesday 02 April 2025 12:23:22 -0400 (0:00:00.081) 0:05:36.581 ******* ok: [managed-node1] => { "ansible_facts": { 
"__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Wednesday 02 April 2025 12:23:22 -0400 (0:00:00.077) 0:05:36.658 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Wednesday 02 April 2025 12:23:22 -0400 (0:00:00.067) 0:05:36.726 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-basic-mysql", "__podman_quadlet_type": "volume", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Wednesday 02 April 2025 12:23:22 -0400 (0:00:00.092) 0:05:36.818 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:23:22 -0400 (0:00:00.225) 0:05:37.044 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:23:22 -0400 (0:00:00.077) 0:05:37.122 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:23:22 -0400 (0:00:00.055) 0:05:37.177 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:23:22 -0400 (0:00:00.074) 0:05:37.251 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610609.2783134, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610533.3987217, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986653, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, 
"version": "4263604762", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:23:23 -0400 (0:00:00.411) 0:05:37.663 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:23:23 -0400 (0:00:00.069) 0:05:37.732 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:23:23 -0400 (0:00:00.068) 0:05:37.801 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:23:23 -0400 (0:00:00.067) 0:05:37.868 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:23:23 -0400 (0:00:00.071) 0:05:37.940 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:23:23 -0400 (0:00:00.069) 0:05:38.009 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:23:23 -0400 (0:00:00.078) 0:05:38.088 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:23:23 -0400 (0:00:00.069) 0:05:38.158 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Wednesday 02 April 2025 12:23:23 -0400 (0:00:00.068) 0:05:38.226 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", 
"__podman_service_name": "quadlet-basic-mysql-volume.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Wednesday 02 April 2025 12:23:24 -0400 (0:00:00.112) 0:05:38.339 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Wednesday 02 April 2025 12:23:24 -0400 (0:00:00.164) 0:05:38.504 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Wednesday 02 April 2025 12:23:24 -0400 (0:00:00.068) 0:05:38.572 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-basic-mysql.volume", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Wednesday 02 April 2025 12:23:24 -0400 (0:00:00.146) 0:05:38.719 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Wednesday 02 April 2025 12:23:24 -0400 (0:00:00.077) 0:05:38.797 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Wednesday 02 April 2025 12:23:24 -0400 (0:00:00.119) 0:05:38.917 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Wednesday 02 April 2025 12:23:24 -0400 (0:00:00.050) 0:05:38.968 ******* changed: [managed-node1] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-basic-mysql-volume.service", "state": "stopped", "status": { "ActiveEnterTimestamp": "Wed 2025-04-02 12:21:48 EDT", "ActiveEnterTimestampMonotonic": "178581549", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "system.slice sysinit.target basic.target systemd-journald.socket -.mount", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "yes", "AssertTimestamp": "Wed 2025-04-02 12:21:48 EDT", "AssertTimestampMonotonic": 
"178532668", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Wed 2025-04-02 12:21:48 EDT", "ConditionTimestampMonotonic": "178532667", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/system.slice/quadlet-basic-mysql-volume.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "quadlet-basic-mysql-volume.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "ExecMainCode": "1", "ExecMainExitTimestamp": "Wed 2025-04-02 12:21:48 EDT", "ExecMainExitTimestampMonotonic": "178581370", "ExecMainPID": "27114", "ExecMainStartTimestamp": "Wed 2025-04-02 12:21:48 EDT", "ExecMainStartTimestampMonotonic": "178533530", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore quadlet-basic-mysql-name ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/run/systemd/generator/quadlet-basic-mysql-volume.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "quadlet-basic-mysql-volume.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Wed 2025-04-02 12:21:48 EDT", "InactiveExitTimestampMonotonic": "178533574", "InvocationID": "f9074f6344864df3abce7165d6473bd6", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": 
"0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "MemoryAccounting": "yes", "MemoryCurrent": "0", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "quadlet-basic-mysql-volume.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "system.slice sysinit.target -.mount", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Wed 2025-04-02 12:21:48 EDT", "StateChangeTimestampMonotonic": "178581549", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "exited", "SuccessAction": "none", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-basic-mysql-volume", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "0", "TasksMax": "22405", "TimeoutStartUSec": "infinity", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Wednesday 02 April 2025 12:23:25 -0400 (0:00:00.678) 0:05:39.646 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610907.9157667, "attr_flags": "", "attributes": [], 
"block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "90a3571bfc7670328fe3f8fb625585613dbd9c4a", "ctime": 1743610907.2857606, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 257949890, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1743610907.0237582, "nlink": 1, "path": "/etc/containers/systemd/quadlet-basic-mysql.volume", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 89, "uid": 0, "version": "1059389119", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Wednesday 02 April 2025 12:23:25 -0400 (0:00:00.386) 0:05:40.032 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Wednesday 02 April 2025 12:23:25 -0400 (0:00:00.076) 0:05:40.108 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Wednesday 02 April 2025 12:23:26 -0400 (0:00:00.442) 0:05:40.551 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Wednesday 02 April 2025 12:23:26 -0400 (0:00:00.059) 0:05:40.611 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Wednesday 02 April 2025 12:23:26 -0400 (0:00:00.043) 0:05:40.654 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Wednesday 02 April 2025 12:23:26 -0400 (0:00:00.040) 0:05:40.695 ******* changed: [managed-node1] => { "changed": true, "path": "/etc/containers/systemd/quadlet-basic-mysql.volume", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Wednesday 02 April 2025 12:23:26 -0400 (0:00:00.391) 0:05:41.086 ******* 
ok: [managed-node1] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Wednesday 02 April 2025 12:23:27 -0400 (0:00:00.660) 0:05:41.747 ******* changed: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Wednesday 02 April 2025 12:23:27 -0400 (0:00:00.481) 0:05:42.228 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Wednesday 02 April 2025 12:23:27 -0400 (0:00:00.068) 0:05:42.297 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Wednesday 02 April 2025 12:23:28 -0400 (0:00:00.049) 0:05:42.347 ******* changed: [managed-node1] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:01.034660", "end": "2025-04-02 12:23:29.355228", "rc": 0, "start": "2025-04-02 12:23:28.320568" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Wednesday 02 April 2025 12:23:29 -0400 (0:00:01.445) 0:05:43.792 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Wednesday 02 April 2025 12:23:29 -0400 (0:00:00.103) 0:05:43.895 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Wednesday 02 April 2025 12:23:29 -0400 (0:00:00.054) 0:05:43.950 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Wednesday 02 April 2025 12:23:29 -0400 (0:00:00.050) 0:05:44.000 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 
Wednesday 02 April 2025 12:23:29 -0400 (0:00:00.043) 0:05:44.044 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.040512", "end": "2025-04-02 12:23:30.057232", "rc": 0, "start": "2025-04-02 12:23:30.016720" } STDOUT: quay.io/libpod/registry 2.8.2 0030ba3d620c 20 months ago 24.6 MB TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Wednesday 02 April 2025 12:23:30 -0400 (0:00:00.443) 0:05:44.488 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.034544", "end": "2025-04-02 12:23:30.536087", "rc": 0, "start": "2025-04-02 12:23:30.501543" } STDOUT: local c1fc31f785346f7a5c9afe68ea235eba6fe181647a5c2fb992f846acb33ed7e7 TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Wednesday 02 April 2025 12:23:30 -0400 (0:00:00.467) 0:05:44.956 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.042757", "end": "2025-04-02 12:23:30.972240", "rc": 0, "start": "2025-04-02 12:23:30.929483" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Wednesday 02 April 2025 12:23:31 -0400 (0:00:00.441) 0:05:45.397 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.061834", "end": "2025-04-02 12:23:31.431264", "rc": 0, "start": "2025-04-02 12:23:31.369430" } STDOUT: podman quadlet-basic-name systemd-quadlet-basic-unused-network TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Wednesday 02 April 2025 12:23:31 -0400 (0:00:00.455) 0:05:45.852 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Wednesday 02 April 2025 12:23:31 -0400 (0:00:00.439) 0:05:46.292 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Wednesday 02 April 2025 12:23:32 -0400 (0:00:00.432) 0:05:46.724 ******* ok: [managed-node1] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", 
"status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "certmonger.service": { "name": "certmonger.service", "source": "systemd", "state": "running", "status": "enabled" }, "chrony-dnssrv@.service": { "name": "chrony-dnssrv@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cni-dhcp.service": { "name": "cni-dhcp.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "static" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus-org.freedesktop.portable1.service": { "name": "dbus-org.freedesktop.portable1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "running", "status": "static" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { 
"name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dnsmasq.service": { "name": "dnsmasq.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "unknown" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "halt-local.service": { "name": "halt-local.service", "source": "systemd", "state": "inactive", "status": "static" }, "import-state.service": { "name": "import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "iprdump.service": { "name": "iprdump.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprinit.service": { "name": "iprinit.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprupdate.service": { "name": "iprupdate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", 
"status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "loadmodules.service": { "name": "loadmodules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "messagebus.service": { "name": "messagebus.service", "source": "systemd", "state": "active", "status": "static" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-convert.service": { "name": "nfs-convert.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-halt.service": { "name": "plymouth-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-kexec.service": { "name": "plymouth-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-poweroff.service": { "name": "plymouth-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-quit.service": { "name": "plymouth-quit.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-read-write.service": { "name": "plymouth-read-write.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-reboot.service": { "name": "plymouth-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-switch-root-initramfs.service": { "name": "plymouth-switch-root-initramfs.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-switch-root.service": { "name": 
"plymouth-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quadlet-basic-network.service": { "name": "quadlet-basic-network.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quadlet-basic-unused-network-network.service": { "name": "quadlet-basic-unused-network-network.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", 
"status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "active", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-plymouth.service": { "name": "systemd-ask-password-plymouth.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": 
"stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-portabled.service": { "name": "systemd-portabled.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-resolved.service": { "name": "systemd-resolved.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "masked" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "inactive", "status": "static" }, 
"systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "tcsd.service": { "name": "tcsd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "timedatex.service": { "name": "timedatex.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "tuned.service": { "name": "tuned.service", "source": "systemd", "state": "running", "status": "enabled" }, "unbound-anchor.service": { "name": "unbound-anchor.service", "source": "systemd", "state": "stopped", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "unknown" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Wednesday 02 April 2025 12:23:34 -0400 (0:00:01.633) 0:05:48.358 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Wednesday 02 April 2025 12:23:34 -0400 (0:00:00.040) 0:05:48.399 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Network": {} }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Wednesday 02 April 2025 12:23:34 -0400 (0:00:00.052) 0:05:48.451 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK 
[fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Wednesday 02 April 2025 12:23:34 -0400 (0:00:00.116) 0:05:48.568 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Wednesday 02 April 2025 12:23:34 -0400 (0:00:00.041) 0:05:48.609 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-basic-unused-network", "__podman_quadlet_type": "network", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Wednesday 02 April 2025 12:23:34 -0400 (0:00:00.058) 0:05:48.668 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:23:34 -0400 (0:00:00.080) 0:05:48.749 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:23:34 -0400 (0:00:00.050) 0:05:48.799 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:23:34 -0400 (0:00:00.051) 0:05:48.851 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:23:34 -0400 (0:00:00.059) 0:05:48.911 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610609.2783134, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610533.3987217, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986653, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "4263604762", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check 
with getsubids for user subuids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:23:35 -0400 (0:00:00.432) 0:05:49.344 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:23:35 -0400 (0:00:00.069) 0:05:49.414 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:23:35 -0400 (0:00:00.066) 0:05:49.480 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:23:35 -0400 (0:00:00.069) 0:05:49.549 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:23:35 -0400 (0:00:00.070) 0:05:49.619 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:23:35 -0400 (0:00:00.070) 0:05:49.690 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:23:35 -0400 (0:00:00.069) 0:05:49.760 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:23:35 -0400 (0:00:00.162) 0:05:49.922 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Wednesday 02 April 2025 12:23:35 -0400 (0:00:00.052) 0:05:49.975 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-basic-unused-network-network.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, 
"changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Wednesday 02 April 2025 12:23:35 -0400 (0:00:00.093) 0:05:50.068 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Wednesday 02 April 2025 12:23:35 -0400 (0:00:00.047) 0:05:50.115 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Wednesday 02 April 2025 12:23:35 -0400 (0:00:00.042) 0:05:50.158 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-basic-unused-network.network", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Wednesday 02 April 2025 12:23:35 -0400 (0:00:00.122) 0:05:50.281 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Wednesday 02 April 2025 12:23:36 -0400 (0:00:00.078) 0:05:50.359 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Wednesday 02 April 2025 12:23:36 -0400 (0:00:00.172) 0:05:50.532 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Wednesday 02 April 2025 12:23:36 -0400 (0:00:00.064) 0:05:50.596 ******* changed: [managed-node1] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-basic-unused-network-network.service", "state": "stopped", "status": { "ActiveEnterTimestamp": "Wed 2025-04-02 12:21:43 EDT", "ActiveEnterTimestampMonotonic": "173673961", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "systemd-journald.socket sysinit.target basic.target system.slice -.mount", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "yes", "AssertTimestamp": "Wed 2025-04-02 12:21:43 EDT", "AssertTimestampMonotonic": "173618435", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", 
"CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Wed 2025-04-02 12:21:43 EDT", "ConditionTimestampMonotonic": "173618433", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/system.slice/quadlet-basic-unused-network-network.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "quadlet-basic-unused-network-network.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "ExecMainCode": "1", "ExecMainExitTimestamp": "Wed 2025-04-02 12:21:43 EDT", "ExecMainExitTimestampMonotonic": "173673799", "ExecMainPID": "26328", "ExecMainStartTimestamp": "Wed 2025-04-02 12:21:43 EDT", "ExecMainStartTimestampMonotonic": "173619289", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore systemd-quadlet-basic-unused-network ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/run/systemd/generator/quadlet-basic-unused-network-network.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "quadlet-basic-unused-network-network.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Wed 2025-04-02 12:21:43 EDT", "InactiveExitTimestampMonotonic": "173619330", "InvocationID": "ab8b406997e549cebffe54b42c10ab22", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", 
"LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "MemoryAccounting": "yes", "MemoryCurrent": "0", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "quadlet-basic-unused-network-network.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "sysinit.target system.slice -.mount", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Wed 2025-04-02 12:21:43 EDT", "StateChangeTimestampMonotonic": "173673961", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "exited", "SuccessAction": "none", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-basic-unused-network-network", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "0", "TasksMax": "22405", "TimeoutStartUSec": "infinity", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Wednesday 02 April 2025 12:23:36 -0400 (0:00:00.725) 0:05:51.322 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610903.021721, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", 
"checksum": "52c9d75ecaf81203cc1f1a3b1dd00fcd25067b01", "ctime": 1743610902.4617157, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 234881218, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1743610902.1907132, "nlink": 1, "path": "/etc/containers/systemd/quadlet-basic-unused-network.network", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 54, "uid": 0, "version": "2085951113", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Wednesday 02 April 2025 12:23:37 -0400 (0:00:00.417) 0:05:51.740 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Wednesday 02 April 2025 12:23:37 -0400 (0:00:00.150) 0:05:51.890 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Wednesday 02 April 2025 12:23:37 -0400 (0:00:00.386) 0:05:52.277 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Wednesday 02 April 2025 12:23:38 -0400 (0:00:00.060) 0:05:52.338 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Wednesday 02 April 2025 12:23:38 -0400 (0:00:00.044) 0:05:52.382 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Wednesday 02 April 2025 12:23:38 -0400 (0:00:00.055) 0:05:52.438 ******* changed: [managed-node1] => { "changed": true, "path": "/etc/containers/systemd/quadlet-basic-unused-network.network", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Wednesday 02 April 2025 12:23:38 -0400 (0:00:00.427) 0:05:52.866 ******* ok: [managed-node1] => { "changed": 
false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Wednesday 02 April 2025 12:23:39 -0400 (0:00:00.670) 0:05:53.536 ******* changed: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Wednesday 02 April 2025 12:23:39 -0400 (0:00:00.501) 0:05:54.038 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Wednesday 02 April 2025 12:23:39 -0400 (0:00:00.059) 0:05:54.098 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Wednesday 02 April 2025 12:23:39 -0400 (0:00:00.039) 0:05:54.138 ******* changed: [managed-node1] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.034807", "end": "2025-04-02 12:23:40.175198", "rc": 0, "start": "2025-04-02 12:23:40.140391" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Wednesday 02 April 2025 12:23:40 -0400 (0:00:00.450) 0:05:54.589 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Wednesday 02 April 2025 12:23:40 -0400 (0:00:00.081) 0:05:54.670 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Wednesday 02 April 2025 12:23:40 -0400 (0:00:00.054) 0:05:54.725 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Wednesday 02 April 2025 12:23:40 -0400 (0:00:00.066) 0:05:54.792 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Wednesday 02 April 2025 12:23:40 
-0400 (0:00:00.071) 0:05:54.863 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.036083", "end": "2025-04-02 12:23:40.900185", "rc": 0, "start": "2025-04-02 12:23:40.864102" } STDOUT: quay.io/libpod/registry 2.8.2 0030ba3d620c 20 months ago 24.6 MB TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Wednesday 02 April 2025 12:23:40 -0400 (0:00:00.464) 0:05:55.327 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.034849", "end": "2025-04-02 12:23:41.382308", "rc": 0, "start": "2025-04-02 12:23:41.347459" } STDOUT: local c1fc31f785346f7a5c9afe68ea235eba6fe181647a5c2fb992f846acb33ed7e7 TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Wednesday 02 April 2025 12:23:41 -0400 (0:00:00.487) 0:05:55.815 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.043160", "end": "2025-04-02 12:23:41.834454", "rc": 0, "start": "2025-04-02 12:23:41.791294" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Wednesday 02 April 2025 12:23:41 -0400 (0:00:00.441) 0:05:56.256 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.052887", "end": "2025-04-02 12:23:42.291268", "rc": 0, "start": "2025-04-02 12:23:42.238381" } STDOUT: podman quadlet-basic-name TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Wednesday 02 April 2025 12:23:42 -0400 (0:00:00.455) 0:05:56.712 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Wednesday 02 April 2025 12:23:43 -0400 (0:00:01.422) 0:05:58.135 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Wednesday 02 April 2025 12:23:44 -0400 (0:00:00.450) 0:05:58.585 ******* ok: [managed-node1] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": 
"systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "certmonger.service": { "name": "certmonger.service", "source": "systemd", "state": "running", "status": "enabled" }, "chrony-dnssrv@.service": { "name": "chrony-dnssrv@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cni-dhcp.service": { "name": "cni-dhcp.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "static" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus-org.freedesktop.portable1.service": { "name": "dbus-org.freedesktop.portable1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "running", "status": "static" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", 
"status": "disabled" }, "dnsmasq.service": { "name": "dnsmasq.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "unknown" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "halt-local.service": { "name": "halt-local.service", "source": "systemd", "state": "inactive", "status": "static" }, "import-state.service": { "name": "import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "iprdump.service": { "name": "iprdump.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprinit.service": { "name": "iprinit.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprupdate.service": { "name": "iprupdate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", 
"state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "loadmodules.service": { "name": "loadmodules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "messagebus.service": { "name": "messagebus.service", "source": "systemd", "state": "active", "status": "static" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-convert.service": { "name": "nfs-convert.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-halt.service": { "name": "plymouth-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-kexec.service": { "name": "plymouth-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-poweroff.service": { "name": "plymouth-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-quit.service": { "name": "plymouth-quit.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-read-write.service": { "name": "plymouth-read-write.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-reboot.service": { "name": "plymouth-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-switch-root-initramfs.service": { "name": "plymouth-switch-root-initramfs.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-switch-root.service": { "name": "plymouth-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quadlet-basic-network.service": { "name": "quadlet-basic-network.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": 
{ "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "active", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-plymouth.service": { "name": "systemd-ask-password-plymouth.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", 
"source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-portabled.service": { "name": "systemd-portabled.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-resolved.service": { "name": "systemd-resolved.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "masked" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, 
"systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "tcsd.service": { "name": "tcsd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "timedatex.service": { "name": "timedatex.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "tuned.service": { "name": "tuned.service", "source": "systemd", "state": "running", "status": "enabled" }, "unbound-anchor.service": { "name": "unbound-anchor.service", "source": "systemd", "state": "stopped", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "unknown" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Wednesday 02 April 2025 12:23:45 -0400 (0:00:01.692) 0:06:00.277 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Wednesday 02 April 2025 12:23:46 -0400 (0:00:00.066) 0:06:00.344 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Network]\nSubnet=192.168.29.0/24\nGateway=192.168.29.1\nLabel=app=wordpress\nNetworkName=quadlet-basic-name\n", "__podman_quadlet_template_src": "templates/quadlet-basic.network.j2" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Wednesday 02 April 2025 12:23:46 -0400 (0:00:00.302) 0:06:00.647 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Wednesday 02 April 2025 12:23:46 -0400 (0:00:00.082) 0:06:00.730 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Wednesday 02 April 2025 12:23:46 -0400 (0:00:00.053) 0:06:00.783 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-basic", "__podman_quadlet_type": "network", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Wednesday 02 April 2025 12:23:46 -0400 (0:00:00.190) 0:06:00.973 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Wednesday 02 April 2025 12:23:46 -0400 (0:00:00.080) 0:06:01.053 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Wednesday 02 April 2025 12:23:46 -0400 (0:00:00.054) 0:06:01.108 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Wednesday 02 April 2025 12:23:46 -0400 (0:00:00.074) 0:06:01.182 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Wednesday 02 April 2025 12:23:46 -0400 (0:00:00.092) 0:06:01.275 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610609.2783134, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "bb5b46ffbafcaa8c4021f3c8b3cb8594f48ef34b", "ctime": 1743610533.3987217, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 6986653, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-sharedlib", "mode": "0755", "mtime": 1700557386.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 12640, "uid": 0, "version": "4263604762", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Wednesday 02 April 2025 12:23:47 -0400 (0:00:00.474) 0:06:01.749 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Wednesday 02 April 2025 12:23:47 -0400 (0:00:00.070) 0:06:01.820 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Wednesday 02 April 2025 12:23:47 -0400 (0:00:00.069) 0:06:01.889 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Wednesday 02 April 2025 12:23:47 -0400 (0:00:00.071) 0:06:01.961 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Wednesday 02 April 2025 12:23:47 -0400 (0:00:00.072) 0:06:02.034 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Wednesday 02 April 2025 12:23:47 -0400 (0:00:00.069) 0:06:02.103 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Wednesday 02 April 2025 12:23:47 -0400 (0:00:00.070) 0:06:02.173 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Wednesday 02 April 2025 12:23:47 -0400 (0:00:00.073) 0:06:02.246 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Wednesday 02 April 2025 12:23:47 -0400 (0:00:00.069) 0:06:02.316 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-basic-network.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set 
per-container variables part 4] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Wednesday 02 April 2025 12:23:48 -0400 (0:00:00.300) 0:06:02.616 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Wednesday 02 April 2025 12:23:48 -0400 (0:00:00.047) 0:06:02.664 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Wednesday 02 April 2025 12:23:48 -0400 (0:00:00.043) 0:06:02.708 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-basic.network", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Wednesday 02 April 2025 12:23:48 -0400 (0:00:00.107) 0:06:02.815 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Wednesday 02 April 2025 12:23:48 -0400 (0:00:00.052) 0:06:02.867 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Wednesday 02 April 2025 12:23:48 -0400 (0:00:00.101) 0:06:02.969 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Wednesday 02 April 2025 12:23:48 -0400 (0:00:00.042) 0:06:03.012 ******* changed: [managed-node1] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-basic-network.service", "state": "stopped", "status": { "ActiveEnterTimestamp": "Wed 2025-04-02 12:21:39 EDT", "ActiveEnterTimestampMonotonic": "169063980", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "basic.target system.slice systemd-journald.socket -.mount sysinit.target", "AllowIsolate": "no", "AllowedCPUs": "", "AllowedMemoryNodes": "", "AmbientCapabilities": "", "AssertResult": "yes", "AssertTimestamp": "Wed 2025-04-02 12:21:39 EDT", "AssertTimestampMonotonic": "169021528", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "no", "CPUAffinity": "", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", 
"CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Wed 2025-04-02 12:21:39 EDT", "ConditionTimestampMonotonic": "169021527", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/system.slice/quadlet-basic-network.service", "ControlPID": "0", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "quadlet-basic-network.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveCPUs": "", "EffectiveMemoryNodes": "", "ExecMainCode": "1", "ExecMainExitTimestamp": "Wed 2025-04-02 12:21:39 EDT", "ExecMainExitTimestampMonotonic": "169063822", "ExecMainPID": "25568", "ExecMainStartTimestamp": "Wed 2025-04-02 12:21:39 EDT", "ExecMainStartTimestampMonotonic": "169022400", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet=192.168.29.0/24 --gateway=192.168.29.1 --label app=wordpress quadlet-basic-name ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FragmentPath": "/run/systemd/generator/quadlet-basic-network.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOSchedulingClass": "0", "IOSchedulingPriority": "0", "IOWeight": "[not set]", "IPAccounting": "no", "IPEgressBytes": "18446744073709551615", "IPEgressPackets": "18446744073709551615", "IPIngressBytes": "18446744073709551615", "IPIngressPackets": "18446744073709551615", "Id": "quadlet-basic-network.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Wed 2025-04-02 12:21:39 EDT", "InactiveExitTimestampMonotonic": "169022439", "InvocationID": "165ae7e70e8347569f4bf2825ad3310b", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "0", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "65536", "LimitMEMLOCKSoft": "65536", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "262144", "LimitNOFILESoft": "1024", "LimitNPROC": "14003", "LimitNPROCSoft": "14003", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", 
"LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "14003", "LimitSIGPENDINGSoft": "14003", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "MemoryAccounting": "yes", "MemoryCurrent": "0", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "MountFlags": "", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAMask": "", "NUMAPolicy": "n/a", "Names": "quadlet-basic-network.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "PermissionsStartOnly": "no", "Perpetual": "no", "PrivateDevices": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "sysinit.target -.mount system.slice", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardInputData": "", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Wed 2025-04-02 12:21:39 EDT", "StateChangeTimestampMonotonic": "169063980", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "exited", "SuccessAction": "none", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-basic-network", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "0", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "0", "TasksMax": "22405", "TimeoutStartUSec": "infinity", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Wednesday 02 April 2025 12:23:49 -0400 (0:00:00.698) 0:06:03.710 ******* ok: [managed-node1] => { "changed": false, "stat": { "atime": 1743610898.420678, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "19c9b17be2af9b9deca5c3bd327f048966750682", "ctime": 1743610897.8126721, "dev": 51713, "device_type": 0, "executable": 
false, "exists": true, "gid": 0, "gr_name": "root", "inode": 207618242, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1743610897.5356696, "nlink": 1, "path": "/etc/containers/systemd/quadlet-basic.network", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 105, "uid": 0, "version": "2860497909", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Wednesday 02 April 2025 12:23:49 -0400 (0:00:00.429) 0:06:04.139 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Wednesday 02 April 2025 12:23:49 -0400 (0:00:00.104) 0:06:04.244 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Wednesday 02 April 2025 12:23:50 -0400 (0:00:00.428) 0:06:04.672 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Wednesday 02 April 2025 12:23:50 -0400 (0:00:00.077) 0:06:04.750 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Wednesday 02 April 2025 12:23:50 -0400 (0:00:00.061) 0:06:04.812 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Wednesday 02 April 2025 12:23:50 -0400 (0:00:00.059) 0:06:04.871 ******* changed: [managed-node1] => { "changed": true, "path": "/etc/containers/systemd/quadlet-basic.network", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Wednesday 02 April 2025 12:23:50 -0400 (0:00:00.387) 0:06:05.258 ******* ok: [managed-node1] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: 
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Wednesday 02 April 2025 12:23:51 -0400 (0:00:00.663) 0:06:05.922 ******* changed: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Wednesday 02 April 2025 12:23:52 -0400 (0:00:00.519) 0:06:06.442 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Wednesday 02 April 2025 12:23:52 -0400 (0:00:00.087) 0:06:06.529 ******* ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Wednesday 02 April 2025 12:23:52 -0400 (0:00:00.062) 0:06:06.592 ******* changed: [managed-node1] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.038157", "end": "2025-04-02 12:23:52.653384", "rc": 0, "start": "2025-04-02 12:23:52.615227" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Wednesday 02 April 2025 12:23:52 -0400 (0:00:00.568) 0:06:07.160 ******* included: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Wednesday 02 April 2025 12:23:52 -0400 (0:00:00.109) 0:06:07.270 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Wednesday 02 April 2025 12:23:52 -0400 (0:00:00.054) 0:06:07.325 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Wednesday 02 April 2025 12:23:53 -0400 (0:00:00.053) 0:06:07.378 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Wednesday 02 April 2025 12:23:53 -0400 (0:00:00.055) 0:06:07.433 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": 
"0:00:00.041142", "end": "2025-04-02 12:23:53.458880", "rc": 0, "start": "2025-04-02 12:23:53.417738" } STDOUT: quay.io/libpod/registry 2.8.2 0030ba3d620c 20 months ago 24.6 MB TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Wednesday 02 April 2025 12:23:53 -0400 (0:00:00.458) 0:06:07.891 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.035116", "end": "2025-04-02 12:23:53.905188", "rc": 0, "start": "2025-04-02 12:23:53.870072" } STDOUT: local c1fc31f785346f7a5c9afe68ea235eba6fe181647a5c2fb992f846acb33ed7e7 TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Wednesday 02 April 2025 12:23:53 -0400 (0:00:00.438) 0:06:08.330 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.038816", "end": "2025-04-02 12:23:54.348124", "rc": 0, "start": "2025-04-02 12:23:54.309308" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Wednesday 02 April 2025 12:23:54 -0400 (0:00:00.448) 0:06:08.778 ******* ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.032768", "end": "2025-04-02 12:23:54.794138", "rc": 0, "start": "2025-04-02 12:23:54.761370" } STDOUT: podman TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Wednesday 02 April 2025 12:23:54 -0400 (0:00:00.444) 0:06:09.223 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Wednesday 02 April 2025 12:23:55 -0400 (0:00:00.528) 0:06:09.751 ******* ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Wednesday 02 April 2025 12:23:55 -0400 (0:00:00.429) 0:06:10.180 ******* ok: [managed-node1] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "running", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", 
"state": "stopped", "status": "static" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "certmonger.service": { "name": "certmonger.service", "source": "systemd", "state": "running", "status": "enabled" }, "chrony-dnssrv@.service": { "name": "chrony-dnssrv@.service", "source": "systemd", "state": "unknown", "status": "static" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cni-dhcp.service": { "name": "cni-dhcp.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "cpupower.service": { "name": "cpupower.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "static" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "active", "status": "enabled" }, "dbus-org.freedesktop.portable1.service": { "name": "dbus-org.freedesktop.portable1.service", "source": "systemd", "state": "inactive", "status": "static" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "running", "status": "static" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dnsmasq.service": { "name": "dnsmasq.service", "source": "systemd", "state": "inactive", "status": "disabled" }, 
"dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "inactive", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "unknown" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "halt-local.service": { "name": "halt-local.service", "source": "systemd", "state": "inactive", "status": "static" }, "import-state.service": { "name": "import-state.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "iprdump.service": { "name": "iprdump.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprinit.service": { "name": "iprinit.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "iprupdate.service": { "name": "iprupdate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", 
"state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "loadmodules.service": { "name": "loadmodules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "messagebus.service": { "name": "messagebus.service", "source": "systemd", "state": "active", "status": "static" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-convert.service": { "name": "nfs-convert.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "oddjobd.service": { "name": "oddjobd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "plymouth-halt.service": { "name": "plymouth-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-kexec.service": { "name": "plymouth-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-poweroff.service": { "name": "plymouth-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-quit.service": { "name": "plymouth-quit.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-read-write.service": { "name": "plymouth-read-write.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-reboot.service": { "name": "plymouth-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "static" }, "plymouth-switch-root-initramfs.service": { "name": "plymouth-switch-root-initramfs.service", "source": "systemd", "state": "inactive", "status": "static" }, "plymouth-switch-root.service": { "name": "plymouth-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, 
"podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon.service": { "name": "quotaon.service", "source": "systemd", "state": "inactive", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rdisc.service": { "name": "rdisc.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": 
"systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "active", "status": "enabled" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-plymouth.service": { "name": "systemd-ask-password-plymouth.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-resume@.service": { "name": "systemd-hibernate-resume@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", 
"state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-portabled.service": { "name": "systemd-portabled.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck.service": { "name": "systemd-quotacheck.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-resolved.service": { "name": "systemd-resolved.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "masked" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "tcsd.service": { "name": "tcsd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "teamd@.service": { "name": "teamd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "timedatex.service": { "name": "timedatex.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "tuned.service": { "name": "tuned.service", "source": "systemd", "state": "running", "status": "enabled" }, "unbound-anchor.service": { "name": "unbound-anchor.service", "source": "systemd", "state": "stopped", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "unknown" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "unknown" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Wednesday 02 April 2025 12:23:57 -0400 (0:00:01.649) 0:06:11.830 ******* skipping: [managed-node1] => { "changed": false, "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:198 Wednesday 02 April 2025 12:23:57 -0400 (0:00:00.041) 0:06:11.871 ******* TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:204 Wednesday 02 April 2025 12:23:57 -0400 (0:00:00.038) 0:06:11.909 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:213 Wednesday 02 April 2025 12:23:57 -0400 (0:00:00.040) 0:06:11.950 ******* skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Ensure no resources] ***************************************************** task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:325 Wednesday 02 April 2025 12:23:57 -0400 (0:00:00.063) 0:06:12.013 ******* fatal: [managed-node1]: FAILED! 
=> { "assertion": "__podman_test_debug_images.stdout == \"\"", "changed": false, "evaluated_to": false } MSG: Assertion failed TASK [Dump journal] ************************************************************ task path: /tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:336 Wednesday 02 April 2025 12:23:57 -0400 (0:00:00.068) 0:06:12.082 ******* fatal: [managed-node1]: FAILED! => { "changed": false, "cmd": [ "journalctl", "-ex" ], "delta": "0:00:00.025814", "end": "2025-04-02 12:23:58.097278", "failed_when_result": true, "rc": 0, "start": "2025-04-02 12:23:58.071464" } STDOUT: -- Logs begin at Wed 2025-04-02 12:18:50 EDT, end at Wed 2025-04-02 12:23:58 EDT. -- Apr 02 12:19:03 managed-node1 systemd[1]: Reached target Network. -- Subject: Unit network.target has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit network.target has finished starting up. -- -- The start-up result is done. Apr 02 12:19:03 managed-node1 NetworkManager[629]: [1743610743.2565] bus-manager: acquired D-Bus service "org.freedesktop.NetworkManager" Apr 02 12:19:03 managed-node1 systemd[1]: Starting GSSAPI Proxy Daemon... -- Subject: Unit gssproxy.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit gssproxy.service has begun starting up. Apr 02 12:19:03 managed-node1 systemd[1]: Starting Dynamic System Tuning Daemon... -- Subject: Unit tuned.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit tuned.service has begun starting up. Apr 02 12:19:03 managed-node1 systemd[1]: Starting Certificate monitoring and PKI enrollment... -- Subject: Unit certmonger.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit certmonger.service has begun starting up. Apr 02 12:19:03 managed-node1 systemd[1]: Starting Network Manager Wait Online... -- Subject: Unit NetworkManager-wait-online.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit NetworkManager-wait-online.service has begun starting up. Apr 02 12:19:03 managed-node1 NetworkManager[629]: [1743610743.2695] manager[0x563afa56b080]: monitoring kernel firmware directory '/lib/firmware'. Apr 02 12:19:03 managed-node1 dbus-daemon[584]: [system] Activating via systemd: service name='org.freedesktop.hostname1' unit='dbus-org.freedesktop.hostname1.service' requested by ':1.2' (uid=0 pid=629 comm="/usr/sbin/NetworkManager --no-daemon " label="system_u:system_r:NetworkManager_t:s0") Apr 02 12:19:03 managed-node1 systemd[1]: Starting Hostname Service... -- Subject: Unit systemd-hostnamed.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-hostnamed.service has begun starting up. Apr 02 12:19:03 managed-node1 systemd[1]: Started GSSAPI Proxy Daemon. -- Subject: Unit gssproxy.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit gssproxy.service has finished starting up. -- -- The start-up result is done. Apr 02 12:19:03 managed-node1 systemd[1]: Reached target NFS client services. -- Subject: Unit nfs-client.target has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit nfs-client.target has finished starting up. -- -- The start-up result is done. 
Apr 02 12:19:03 managed-node1 systemd[1]: Reached target Remote File Systems (Pre). -- Subject: Unit remote-fs-pre.target has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit remote-fs-pre.target has finished starting up. -- -- The start-up result is done. Apr 02 12:19:03 managed-node1 systemd[1]: Reached target Remote File Systems. -- Subject: Unit remote-fs.target has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit remote-fs.target has finished starting up. -- -- The start-up result is done. Apr 02 12:19:03 managed-node1 systemd[1]: Started Certificate monitoring and PKI enrollment. -- Subject: Unit certmonger.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit certmonger.service has finished starting up. -- -- The start-up result is done. Apr 02 12:19:03 managed-node1 dbus-daemon[584]: [system] Successfully activated service 'org.freedesktop.hostname1' Apr 02 12:19:03 managed-node1 systemd[1]: Started Hostname Service. -- Subject: Unit systemd-hostnamed.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-hostnamed.service has finished starting up. -- -- The start-up result is done. Apr 02 12:19:03 managed-node1 NetworkManager[629]: [1743610743.3803] hostname: hostname: using hostnamed Apr 02 12:19:03 managed-node1 NetworkManager[629]: [1743610743.3803] hostname: static hostname changed from (none) to "managed-node1" Apr 02 12:19:03 managed-node1 NetworkManager[629]: [1743610743.3808] dns-mgr: init: dns=default,systemd-resolved rc-manager=symlink Apr 02 12:19:03 managed-node1 NetworkManager[629]: [1743610743.5005] Loaded device plugin: NMTeamFactory (/usr/lib64/NetworkManager/1.40.16-15.el8/libnm-device-plugin-team.so) Apr 02 12:19:03 managed-node1 NetworkManager[629]: [1743610743.5005] manager: rfkill: Wi-Fi enabled by radio killswitch; enabled by state file Apr 02 12:19:03 managed-node1 NetworkManager[629]: [1743610743.5005] manager: rfkill: WWAN enabled by radio killswitch; enabled by state file Apr 02 12:19:03 managed-node1 NetworkManager[629]: [1743610743.5006] manager: Networking is enabled by state file Apr 02 12:19:03 managed-node1 dbus-daemon[584]: [system] Activating via systemd: service name='org.freedesktop.nm_dispatcher' unit='dbus-org.freedesktop.nm-dispatcher.service' requested by ':1.2' (uid=0 pid=629 comm="/usr/sbin/NetworkManager --no-daemon " label="system_u:system_r:NetworkManager_t:s0") Apr 02 12:19:03 managed-node1 systemd[1]: Starting Network Manager Script Dispatcher Service... -- Subject: Unit NetworkManager-dispatcher.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit NetworkManager-dispatcher.service has begun starting up. Apr 02 12:19:03 managed-node1 dbus-daemon[584]: [system] Successfully activated service 'org.freedesktop.nm_dispatcher' Apr 02 12:19:03 managed-node1 systemd[1]: Started Network Manager Script Dispatcher Service. -- Subject: Unit NetworkManager-dispatcher.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit NetworkManager-dispatcher.service has finished starting up. -- -- The start-up result is done. 
Apr 02 12:19:03 managed-node1 NetworkManager[629]: [1743610743.5868] settings: Loaded settings plugin: ifcfg-rh ("/usr/lib64/NetworkManager/1.40.16-15.el8/libnm-settings-plugin-ifcfg-rh.so") Apr 02 12:19:03 managed-node1 NetworkManager[629]: [1743610743.5869] settings: Loaded settings plugin: keyfile (internal) Apr 02 12:19:03 managed-node1 NetworkManager[629]: [1743610743.6372] dhcp: init: Using DHCP client 'internal' Apr 02 12:19:03 managed-node1 NetworkManager[629]: [1743610743.6372] device (lo): carrier: link connected Apr 02 12:19:03 managed-node1 NetworkManager[629]: [1743610743.6375] manager: (lo): new Generic device (/org/freedesktop/NetworkManager/Devices/1) Apr 02 12:19:03 managed-node1 NetworkManager[629]: [1743610743.6385] manager: (eth0): new Ethernet device (/org/freedesktop/NetworkManager/Devices/2) Apr 02 12:19:03 managed-node1 NetworkManager[629]: [1743610743.6388] device (eth0): state change: unmanaged -> unavailable (reason 'managed', sys-iface-state: 'external') Apr 02 12:19:03 managed-node1 kernel: IPv6: ADDRCONF(NETDEV_UP): eth0: link is not ready Apr 02 12:19:03 managed-node1 NetworkManager[629]: [1743610743.6411] device (eth0): carrier: link connected Apr 02 12:19:03 managed-node1 NetworkManager[629]: [1743610743.6412] device (eth0): state change: unavailable -> disconnected (reason 'carrier-changed', sys-iface-state: 'managed') Apr 02 12:19:03 managed-node1 NetworkManager[629]: [1743610743.6416] policy: auto-activating connection 'System eth0' (5fb06bd0-0bb0-7ffb-45f1-d6edd65f3e03) Apr 02 12:19:03 managed-node1 NetworkManager[629]: [1743610743.6421] device (eth0): Activation: starting connection 'System eth0' (5fb06bd0-0bb0-7ffb-45f1-d6edd65f3e03) Apr 02 12:19:03 managed-node1 NetworkManager[629]: [1743610743.6422] device (eth0): state change: disconnected -> prepare (reason 'none', sys-iface-state: 'managed') Apr 02 12:19:03 managed-node1 NetworkManager[629]: [1743610743.6424] manager: NetworkManager state is now CONNECTING Apr 02 12:19:03 managed-node1 NetworkManager[629]: [1743610743.6425] device (eth0): state change: prepare -> config (reason 'none', sys-iface-state: 'managed') Apr 02 12:19:03 managed-node1 NetworkManager[629]: [1743610743.6437] device (eth0): state change: config -> ip-config (reason 'none', sys-iface-state: 'managed') Apr 02 12:19:03 managed-node1 NetworkManager[629]: [1743610743.6441] dhcp4 (eth0): activation: beginning transaction (timeout in 45 seconds) Apr 02 12:19:03 managed-node1 NetworkManager[629]: [1743610743.6506] dhcp4 (eth0): state changed new lease, address=10.31.44.176 Apr 02 12:19:03 managed-node1 NetworkManager[629]: [1743610743.6517] policy: set 'System eth0' (eth0) as default for IPv4 routing and DNS Apr 02 12:19:03 managed-node1 dbus-daemon[584]: [system] Activating via systemd: service name='org.freedesktop.resolve1' unit='dbus-org.freedesktop.resolve1.service' requested by ':1.2' (uid=0 pid=629 comm="/usr/sbin/NetworkManager --no-daemon " label="system_u:system_r:NetworkManager_t:s0") Apr 02 12:19:03 managed-node1 dbus-daemon[584]: [system] Activation via systemd failed for unit 'dbus-org.freedesktop.resolve1.service': Unit dbus-org.freedesktop.resolve1.service not found. 
Apr 02 12:19:03 managed-node1 NetworkManager[629]: [1743610743.6612] device (eth0): state change: ip-config -> ip-check (reason 'none', sys-iface-state: 'managed') Apr 02 12:19:03 managed-node1 NetworkManager[629]: [1743610743.6656] device (eth0): state change: ip-check -> secondaries (reason 'none', sys-iface-state: 'managed') Apr 02 12:19:03 managed-node1 NetworkManager[629]: [1743610743.6661] device (eth0): state change: secondaries -> activated (reason 'none', sys-iface-state: 'managed') Apr 02 12:19:03 managed-node1 NetworkManager[629]: [1743610743.6669] manager: NetworkManager state is now CONNECTED_SITE Apr 02 12:19:03 managed-node1 NetworkManager[629]: [1743610743.6677] device (eth0): Activation: successful, device activated. Apr 02 12:19:03 managed-node1 NetworkManager[629]: [1743610743.6690] manager: NetworkManager state is now CONNECTED_GLOBAL Apr 02 12:19:03 managed-node1 NetworkManager[629]: [1743610743.6694] manager: startup complete Apr 02 12:19:03 managed-node1 systemd[1]: Started Network Manager Wait Online. -- Subject: Unit NetworkManager-wait-online.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit NetworkManager-wait-online.service has finished starting up. -- -- The start-up result is done. Apr 02 12:19:03 managed-node1 systemd[1]: Starting Initial cloud-init job (metadata service crawler)... -- Subject: Unit cloud-init.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit cloud-init.service has begun starting up. Apr 02 12:19:04 managed-node1 chronyd[587]: Added source 10.2.32.37 Apr 02 12:19:04 managed-node1 chronyd[587]: Added source 10.2.32.38 Apr 02 12:19:04 managed-node1 chronyd[587]: Added source 10.11.160.238 Apr 02 12:19:04 managed-node1 chronyd[587]: Added source 10.18.100.10 Apr 02 12:19:04 managed-node1 cloud-init[718]: Cloud-init v. 23.4-7.el8.2 running 'init' at Wed, 02 Apr 2025 16:19:04 +0000. Up 14.02 seconds. Apr 02 12:19:04 managed-node1 cloud-init[718]: ci-info: ++++++++++++++++++++++++++++++++++++++Net device info++++++++++++++++++++++++++++++++++++++ Apr 02 12:19:04 managed-node1 cloud-init[718]: ci-info: +--------+------+----------------------------+---------------+--------+-------------------+ Apr 02 12:19:04 managed-node1 cloud-init[718]: ci-info: | Device | Up | Address | Mask | Scope | Hw-Address | Apr 02 12:19:04 managed-node1 cloud-init[718]: ci-info: +--------+------+----------------------------+---------------+--------+-------------------+ Apr 02 12:19:04 managed-node1 cloud-init[718]: ci-info: | eth0 | True | 10.31.44.176 | 255.255.252.0 | global | 02:66:7a:fb:eb:9b | Apr 02 12:19:04 managed-node1 cloud-init[718]: ci-info: | eth0 | True | fe80::66:7aff:fefb:eb9b/64 | . | link | 02:66:7a:fb:eb:9b | Apr 02 12:19:04 managed-node1 cloud-init[718]: ci-info: | lo | True | 127.0.0.1 | 255.0.0.0 | host | . | Apr 02 12:19:04 managed-node1 cloud-init[718]: ci-info: | lo | True | ::1/128 | . | host | . 
| Apr 02 12:19:04 managed-node1 cloud-init[718]: ci-info: +--------+------+----------------------------+---------------+--------+-------------------+ Apr 02 12:19:04 managed-node1 cloud-init[718]: ci-info: ++++++++++++++++++++++++++++Route IPv4 info+++++++++++++++++++++++++++++ Apr 02 12:19:04 managed-node1 cloud-init[718]: ci-info: +-------+-------------+------------+---------------+-----------+-------+ Apr 02 12:19:04 managed-node1 cloud-init[718]: ci-info: | Route | Destination | Gateway | Genmask | Interface | Flags | Apr 02 12:19:04 managed-node1 cloud-init[718]: ci-info: +-------+-------------+------------+---------------+-----------+-------+ Apr 02 12:19:04 managed-node1 cloud-init[718]: ci-info: | 0 | 0.0.0.0 | 10.31.44.1 | 0.0.0.0 | eth0 | UG | Apr 02 12:19:04 managed-node1 cloud-init[718]: ci-info: | 1 | 10.31.44.0 | 0.0.0.0 | 255.255.252.0 | eth0 | U | Apr 02 12:19:04 managed-node1 cloud-init[718]: ci-info: +-------+-------------+------------+---------------+-----------+-------+ Apr 02 12:19:04 managed-node1 cloud-init[718]: ci-info: +++++++++++++++++++Route IPv6 info+++++++++++++++++++ Apr 02 12:19:04 managed-node1 cloud-init[718]: ci-info: +-------+-------------+---------+-----------+-------+ Apr 02 12:19:04 managed-node1 cloud-init[718]: ci-info: | Route | Destination | Gateway | Interface | Flags | Apr 02 12:19:04 managed-node1 cloud-init[718]: ci-info: +-------+-------------+---------+-----------+-------+ Apr 02 12:19:04 managed-node1 cloud-init[718]: ci-info: | 1 | fe80::/64 | :: | eth0 | U | Apr 02 12:19:04 managed-node1 cloud-init[718]: ci-info: | 3 | multicast | :: | eth0 | U | Apr 02 12:19:04 managed-node1 cloud-init[718]: ci-info: +-------+-------------+---------+-----------+-------+ Apr 02 12:19:04 managed-node1 systemd[1]: Started Initial cloud-init job (metadata service crawler). -- Subject: Unit cloud-init.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit cloud-init.service has finished starting up. -- -- The start-up result is done. Apr 02 12:19:04 managed-node1 systemd[1]: Reached target Network is Online. -- Subject: Unit network-online.target has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit network-online.target has finished starting up. -- -- The start-up result is done. Apr 02 12:19:04 managed-node1 systemd[1]: Starting Crash recovery kernel arming... -- Subject: Unit kdump.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit kdump.service has begun starting up. Apr 02 12:19:04 managed-node1 systemd[1]: Starting System Logging Service... -- Subject: Unit rsyslog.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit rsyslog.service has begun starting up. Apr 02 12:19:04 managed-node1 systemd[1]: Starting The restraint harness.... -- Subject: Unit restraintd.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit restraintd.service has begun starting up. Apr 02 12:19:04 managed-node1 systemd[1]: Starting Notify NFS peers of a restart... -- Subject: Unit rpc-statd-notify.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit rpc-statd-notify.service has begun starting up. Apr 02 12:19:04 managed-node1 systemd[1]: Reached target Cloud-config availability. 
-- Subject: Unit cloud-config.target has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit cloud-config.target has finished starting up. -- -- The start-up result is done. Apr 02 12:19:04 managed-node1 systemd[1]: Starting Apply the settings specified in cloud-config... -- Subject: Unit cloud-config.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit cloud-config.service has begun starting up. Apr 02 12:19:04 managed-node1 systemd[1]: Starting OpenSSH server daemon... -- Subject: Unit sshd.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit sshd.service has begun starting up. Apr 02 12:19:04 managed-node1 sm-notify[790]: Version 2.3.3 starting Apr 02 12:19:04 managed-node1 systemd[1]: Started Notify NFS peers of a restart. -- Subject: Unit rpc-statd-notify.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit rpc-statd-notify.service has finished starting up. -- -- The start-up result is done. Apr 02 12:19:04 managed-node1 sshd[792]: Server listening on 0.0.0.0 port 22. Apr 02 12:19:04 managed-node1 sshd[792]: Server listening on :: port 22. Apr 02 12:19:04 managed-node1 systemd[1]: Started OpenSSH server daemon. -- Subject: Unit sshd.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit sshd.service has finished starting up. -- -- The start-up result is done. Apr 02 12:19:04 managed-node1 systemd[1]: Started The restraint harness.. -- Subject: Unit restraintd.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit restraintd.service has finished starting up. -- -- The start-up result is done. Apr 02 12:19:05 managed-node1 dbus-daemon[584]: [system] Activating via systemd: service name='org.freedesktop.PolicyKit1' unit='polkit.service' requested by ':1.8' (uid=0 pid=633 comm="/usr/libexec/platform-python -Es /usr/sbin/tuned -" label="system_u:system_r:tuned_t:s0") Apr 02 12:19:05 managed-node1 systemd[1]: Starting Authorization Manager... -- Subject: Unit polkit.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit polkit.service has begun starting up. Apr 02 12:19:05 managed-node1 rsyslogd[788]: [origin software="rsyslogd" swVersion="8.2102.0-15.el8" x-pid="788" x-info="https://www.rsyslog.com"] start Apr 02 12:19:05 managed-node1 systemd[1]: Started System Logging Service. -- Subject: Unit rsyslog.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit rsyslog.service has finished starting up. -- -- The start-up result is done. Apr 02 12:19:05 managed-node1 rsyslogd[788]: imjournal: journal files changed, reloading... [v8.2102.0-15.el8 try https://www.rsyslog.com/e/0 ] Apr 02 12:19:05 managed-node1 systemd[1]: Started Dynamic System Tuning Daemon. -- Subject: Unit tuned.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit tuned.service has finished starting up. -- -- The start-up result is done. Apr 02 12:19:05 managed-node1 cloud-init[1198]: Cloud-init v. 23.4-7.el8.2 running 'modules:config' at Wed, 02 Apr 2025 16:19:05 +0000. Up 15.54 seconds. Apr 02 12:19:05 managed-node1 systemd[1]: Started Apply the settings specified in cloud-config. 
-- Subject: Unit cloud-config.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit cloud-config.service has finished starting up. -- -- The start-up result is done. Apr 02 12:19:05 managed-node1 systemd[1]: Starting Permit User Sessions... -- Subject: Unit systemd-user-sessions.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-user-sessions.service has begun starting up. Apr 02 12:19:05 managed-node1 systemd[1]: Starting Execute cloud user/final scripts... -- Subject: Unit cloud-final.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit cloud-final.service has begun starting up. Apr 02 12:19:05 managed-node1 systemd[1]: Started Permit User Sessions. -- Subject: Unit systemd-user-sessions.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-user-sessions.service has finished starting up. -- -- The start-up result is done. Apr 02 12:19:05 managed-node1 systemd[1]: Starting Hold until boot process finishes up... -- Subject: Unit plymouth-quit-wait.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit plymouth-quit-wait.service has begun starting up. Apr 02 12:19:05 managed-node1 systemd[1]: Starting Terminate Plymouth Boot Screen... -- Subject: Unit plymouth-quit.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit plymouth-quit.service has begun starting up. Apr 02 12:19:05 managed-node1 systemd[1]: Started Command Scheduler. -- Subject: Unit crond.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit crond.service has finished starting up. -- -- The start-up result is done. Apr 02 12:19:05 managed-node1 crond[1220]: (CRON) STARTUP (1.5.2) Apr 02 12:19:05 managed-node1 crond[1220]: (CRON) INFO (Syslog will be used instead of sendmail.) Apr 02 12:19:05 managed-node1 crond[1220]: (CRON) INFO (RANDOM_DELAY will be scaled with factor 20% if used.) Apr 02 12:19:05 managed-node1 systemd[1]: Received SIGRTMIN+21 from PID 319 (plymouthd). Apr 02 12:19:05 managed-node1 crond[1220]: (CRON) INFO (running with inotify support) Apr 02 12:19:05 managed-node1 systemd[1]: Received SIGRTMIN+21 from PID 319 (n/a). Apr 02 12:19:05 managed-node1 systemd[1]: Started Hold until boot process finishes up. -- Subject: Unit plymouth-quit-wait.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit plymouth-quit-wait.service has finished starting up. -- -- The start-up result is done. Apr 02 12:19:05 managed-node1 systemd[1]: Started Getty on tty1. -- Subject: Unit getty@tty1.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit getty@tty1.service has finished starting up. -- -- The start-up result is done. Apr 02 12:19:05 managed-node1 systemd[1]: Reached target Login Prompts. -- Subject: Unit getty.target has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit getty.target has finished starting up. -- -- The start-up result is done. Apr 02 12:19:05 managed-node1 systemd[1]: Started Terminate Plymouth Boot Screen. 
-- Subject: Unit plymouth-quit.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit plymouth-quit.service has finished starting up. -- -- The start-up result is done. Apr 02 12:19:05 managed-node1 systemd[1]: Reached target Multi-User System. -- Subject: Unit multi-user.target has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit multi-user.target has finished starting up. -- -- The start-up result is done. Apr 02 12:19:05 managed-node1 systemd[1]: Starting Update UTMP about System Runlevel Changes... -- Subject: Unit systemd-update-utmp-runlevel.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-update-utmp-runlevel.service has begun starting up. Apr 02 12:19:05 managed-node1 systemd[1]: systemd-update-utmp-runlevel.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit systemd-update-utmp-runlevel.service has successfully entered the 'dead' state. Apr 02 12:19:05 managed-node1 systemd[1]: Started Update UTMP about System Runlevel Changes. -- Subject: Unit systemd-update-utmp-runlevel.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit systemd-update-utmp-runlevel.service has finished starting up. -- -- The start-up result is done. Apr 02 12:19:05 managed-node1 polkitd[1142]: Started polkitd version 0.115 Apr 02 12:19:06 managed-node1 polkitd[1142]: Loading rules from directory /etc/polkit-1/rules.d Apr 02 12:19:06 managed-node1 polkitd[1142]: Loading rules from directory /usr/share/polkit-1/rules.d Apr 02 12:19:06 managed-node1 polkitd[1142]: Finished loading, compiling and executing 2 rules Apr 02 12:19:06 managed-node1 dbus-daemon[584]: [system] Successfully activated service 'org.freedesktop.PolicyKit1' Apr 02 12:19:06 managed-node1 systemd[1]: Started Authorization Manager. -- Subject: Unit polkit.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit polkit.service has finished starting up. -- -- The start-up result is done. Apr 02 12:19:06 managed-node1 polkitd[1142]: Acquired the name org.freedesktop.PolicyKit1 on the system bus Apr 02 12:19:06 managed-node1 restraintd[801]: Listening on http://localhost:8081 Apr 02 12:19:06 managed-node1 cloud-init[1247]: Cloud-init v. 23.4-7.el8.2 running 'modules:final' at Wed, 02 Apr 2025 16:19:06 +0000. Up 16.24 seconds. Apr 02 12:19:06 managed-node1 cloud-init[1247]: Cloud-init v. 23.4-7.el8.2 finished at Wed, 02 Apr 2025 16:19:06 +0000. Datasource DataSourceEc2Local. Up 16.81 seconds Apr 02 12:19:06 managed-node1 systemd[1]: Started Execute cloud user/final scripts. -- Subject: Unit cloud-final.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit cloud-final.service has finished starting up. -- -- The start-up result is done. Apr 02 12:19:06 managed-node1 systemd[1]: Reached target Cloud-init target. -- Subject: Unit cloud-init.target has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit cloud-init.target has finished starting up. -- -- The start-up result is done. 
Apr 02 12:19:06 managed-node1 kdumpctl[794]: kdump: kexec: loaded kdump kernel Apr 02 12:19:06 managed-node1 kdumpctl[794]: kdump: Starting kdump: [OK] Apr 02 12:19:06 managed-node1 systemd[1]: Started Crash recovery kernel arming. -- Subject: Unit kdump.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit kdump.service has finished starting up. -- -- The start-up result is done. Apr 02 12:19:06 managed-node1 systemd[1]: Startup finished in 521ms (kernel) + 3.747s (initrd) + 12.729s (userspace) = 16.998s. -- Subject: System start-up is now complete -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- All system services necessary queued for starting at boot have been -- started. Note that this does not mean that the machine is now idle as services -- might still be busy with completing start-up. -- -- Kernel start-up required 521684 microseconds. -- -- Initial RAM disk start-up required 3747345 microseconds. -- -- Userspace start-up required 12729882 microseconds. Apr 02 12:19:09 managed-node1 chronyd[587]: Selected source 10.11.160.238 Apr 02 12:19:09 managed-node1 chronyd[587]: System clock TAI offset set to 37 seconds Apr 02 12:19:13 managed-node1 sshd[1400]: Accepted publickey for root from 10.31.47.33 port 43278 ssh2: RSA SHA256:9j1blwt3wcrRiGYZQ7ZGu9axm3cDklH6/z4c+Ee8CzE Apr 02 12:19:13 managed-node1 systemd[1]: /etc/systemd/system/user-0.slice:4: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument Apr 02 12:19:13 managed-node1 systemd[1]: /etc/systemd/system/user-.slice.d/override.conf:2: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument Apr 02 12:19:13 managed-node1 systemd[1]: Created slice User Slice of UID 0. -- Subject: Unit user-0.slice has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user-0.slice has finished starting up. -- -- The start-up result is done. Apr 02 12:19:13 managed-node1 systemd[1]: Starting User runtime directory /run/user/0... -- Subject: Unit user-runtime-dir@0.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user-runtime-dir@0.service has begun starting up. Apr 02 12:19:13 managed-node1 systemd-logind[581]: New session 1 of user root. -- Subject: A new session 1 has been created for user root -- Defined-By: systemd -- Support: https://access.redhat.com/support -- Documentation: https://www.freedesktop.org/wiki/Software/systemd/multiseat -- -- A new session with the ID 1 has been created for the user root. -- -- The leading process of the session is 1400. Apr 02 12:19:13 managed-node1 systemd[1]: Started User runtime directory /run/user/0. -- Subject: Unit user-runtime-dir@0.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user-runtime-dir@0.service has finished starting up. -- -- The start-up result is done. Apr 02 12:19:13 managed-node1 systemd[1]: Starting User Manager for UID 0... -- Subject: Unit user@0.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user@0.service has begun starting up. Apr 02 12:19:13 managed-node1 systemd[1404]: pam_unix(systemd-user:session): session opened for user root by (uid=0) Apr 02 12:19:13 managed-node1 systemd[1404]: Starting D-Bus User Message Bus Socket. 
-- Subject: Unit UNIT has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has begun starting up. Apr 02 12:19:13 managed-node1 systemd[1404]: Reached target Timers. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:19:13 managed-node1 systemd[1404]: Reached target Paths. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:19:13 managed-node1 systemd[1404]: Listening on D-Bus User Message Bus Socket. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:19:13 managed-node1 systemd[1404]: Reached target Sockets. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:19:13 managed-node1 systemd[1404]: Reached target Basic System. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:19:13 managed-node1 systemd[1404]: Reached target Default. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:19:13 managed-node1 systemd[1404]: Startup finished in 174ms. -- Subject: User manager start-up is now complete -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The user manager instance for user 0 has been started. All services queued -- for starting have been started. Note that other services might still be starting -- up or be started at any later time. -- -- Startup of the manager took 174108 microseconds. Apr 02 12:19:13 managed-node1 systemd[1]: Started User Manager for UID 0. -- Subject: Unit user@0.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user@0.service has finished starting up. -- -- The start-up result is done. Apr 02 12:19:13 managed-node1 systemd[1]: Started Session 1 of user root. -- Subject: Unit session-1.scope has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit session-1.scope has finished starting up. -- -- The start-up result is done. Apr 02 12:19:13 managed-node1 sshd[1400]: pam_unix(sshd:session): session opened for user root by (uid=0) Apr 02 12:19:14 managed-node1 systemd[1]: NetworkManager-dispatcher.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. 
Apr 02 12:19:17 managed-node1 platform-python[1712]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:19:18 managed-node1 platform-python[1841]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:19:20 managed-node1 platform-python[1966]: ansible-getent Invoked with database=passwd key=user_quadlet_basic fail_key=False service=None split=None Apr 02 12:19:22 managed-node1 platform-python[2090]: ansible-command Invoked with creates=/var/lib/systemd/linger/user_quadlet_basic _raw_params=loginctl enable-linger user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Apr 02 12:19:22 managed-node1 systemd[1]: /etc/systemd/system/user-.slice.d/override.conf:2: Failed to assign slice user.slice to unit user-1111.slice, ignoring: Invalid argument Apr 02 12:19:22 managed-node1 systemd[1]: Created slice User Slice of UID 1111. -- Subject: Unit user-1111.slice has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user-1111.slice has finished starting up. -- -- The start-up result is done. Apr 02 12:19:22 managed-node1 systemd[1]: Starting User runtime directory /run/user/1111... -- Subject: Unit user-runtime-dir@1111.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user-runtime-dir@1111.service has begun starting up. Apr 02 12:19:22 managed-node1 systemd[1]: Started User runtime directory /run/user/1111. -- Subject: Unit user-runtime-dir@1111.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user-runtime-dir@1111.service has finished starting up. -- -- The start-up result is done. Apr 02 12:19:22 managed-node1 systemd[1]: Starting User Manager for UID 1111... -- Subject: Unit user@1111.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user@1111.service has begun starting up. Apr 02 12:19:22 managed-node1 systemd[2094]: pam_unix(systemd-user:session): session opened for user user_quadlet_basic by (uid=0) Apr 02 12:19:22 managed-node1 systemd[2094]: Starting D-Bus User Message Bus Socket. -- Subject: Unit UNIT has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has begun starting up. Apr 02 12:19:22 managed-node1 systemd[2094]: Reached target Paths. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:19:22 managed-node1 systemd[2094]: Started Mark boot as successful after the user session has run 2 minutes. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:19:22 managed-node1 systemd[2094]: Reached target Timers. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. 
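The entries above show the role preparing the rootless user: it resolves user_quadlet_basic with getent, then runs loginctl enable-linger guarded by a creates= check so the command is skipped once /var/lib/systemd/linger/user_quadlet_basic exists, which is what brings up the user@1111.service manager seen next. A minimal sketch of an equivalent task, using only the command and guard path shown in the log:

- name: Enable lingering so the user's systemd instance outlives login sessions
  command: loginctl enable-linger user_quadlet_basic
  args:
    # Idempotence guard taken from the logged invocation: once this flag file
    # exists, the command is not run again.
    creates: /var/lib/systemd/linger/user_quadlet_basic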
Apr 02 12:19:22 managed-node1 systemd[2094]: Listening on D-Bus User Message Bus Socket. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:19:22 managed-node1 systemd[2094]: Reached target Sockets. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:19:22 managed-node1 systemd[2094]: Reached target Basic System. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:19:22 managed-node1 systemd[2094]: Reached target Default. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:19:22 managed-node1 systemd[2094]: Startup finished in 36ms. -- Subject: User manager start-up is now complete -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The user manager instance for user 1111 has been started. All services queued -- for starting have been started. Note that other services might still be starting -- up or be started at any later time. -- -- Startup of the manager took 36575 microseconds. Apr 02 12:19:22 managed-node1 systemd[1]: Started User Manager for UID 1111. -- Subject: Unit user@1111.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user@1111.service has finished starting up. -- -- The start-up result is done. Apr 02 12:19:22 managed-node1 platform-python[2227]: ansible-stat Invoked with path=/run/user/1111 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:19:23 managed-node1 sudo[2352]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qclfqivydpuhdoyspbkwhovvuidvhdgn ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610762.9110796-15455-18253467889005/AnsiballZ_podman_secret.py' Apr 02 12:19:23 managed-node1 sudo[2352]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:19:23 managed-node1 kernel: evm: overlay not supported Apr 02 12:19:23 managed-node1 systemd[2094]: Started D-Bus User Message Bus. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:19:23 managed-node1 systemd[2094]: Created slice user.slice. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:19:23 managed-node1 systemd[2094]: Started podman-2370.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:19:23 managed-node1 systemd[2094]: Started podman-pause-a17f0318.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. 
-- -- The start-up result is done. Apr 02 12:19:23 managed-node1 systemd[2094]: Started podman-2388.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:19:23 managed-node1 systemd[2094]: Started podman-2395.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:19:23 managed-node1 sudo[2352]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:19:25 managed-node1 platform-python[2523]: ansible-command Invoked with creates=/var/lib/systemd/linger/user_quadlet_basic _raw_params=loginctl enable-linger user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Apr 02 12:19:26 managed-node1 platform-python[2646]: ansible-stat Invoked with path=/run/user/1111 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:19:26 managed-node1 sudo[2771]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-hjebdcqpzlmnfyadchdroeqmotkzwlxy ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610766.2017386-15605-59941119904652/AnsiballZ_podman_secret.py' Apr 02 12:19:26 managed-node1 sudo[2771]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:19:26 managed-node1 systemd[2094]: Started podman-2782.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:19:26 managed-node1 systemd[2094]: Started podman-2789.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:19:26 managed-node1 systemd[2094]: Started podman-2796.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. 
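At 12:19:23 and 12:19:26 the controller becomes user_quadlet_basic (sudo with XDG_RUNTIME_DIR=/run/user/1111) and runs AnsiballZ_podman_secret.py, i.e. two rootless podman secrets are created; the podman-*.scope units are the podman processes doing that work. A hedged sketch of what such a task might look like, assuming the containers.podman.podman_secret module; the secret name and value below are hypothetical, since the log does not show them:

- name: Create a rootless podman secret for the test container (sketch)
  containers.podman.podman_secret:
    name: mysql-root-password            # hypothetical name; not visible in this log excerpt
    data: "{{ some_vaulted_password }}"  # hypothetical variable
    state: present
  become: true
  become_user: user_quadlet_basic
  environment:
    # Matches the logged sudo command line: the user's runtime dir must be set
    # so podman talks to the rootless instance.
    XDG_RUNTIME_DIR: /run/user/1111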
Apr 02 12:19:26 managed-node1 sudo[2771]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:19:27 managed-node1 platform-python[2924]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:19:28 managed-node1 platform-python[3049]: ansible-command Invoked with _raw_params=getsubids user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:19:28 managed-node1 platform-python[3173]: ansible-command Invoked with _raw_params=getsubids -g user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:19:29 managed-node1 platform-python[3297]: ansible-command Invoked with creates=/var/lib/systemd/linger/user_quadlet_basic _raw_params=loginctl enable-linger user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Apr 02 12:19:30 managed-node1 platform-python[3420]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd state=directory owner=user_quadlet_basic group=1111 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Apr 02 12:19:30 managed-node1 platform-python[3543]: ansible-stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic.network follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True Apr 02 12:19:31 managed-node1 platform-python[3642]: ansible-copy Invoked with dest=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic.network owner=user_quadlet_basic group=1111 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1743610770.6321862-15801-233949183344885/source _original_basename=tmppge9mqeh follow=False checksum=19c9b17be2af9b9deca5c3bd327f048966750682 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None Apr 02 12:19:31 managed-node1 sudo[3767]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-chafgtyljihdpphmwlcmfoqgmkadvlgq ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610771.511609-15843-214957815345759/AnsiballZ_systemd.py' Apr 02 12:19:31 managed-node1 sudo[3767]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:19:31 managed-node1 platform-python[3770]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None Apr 02 12:19:31 managed-node1 systemd[2094]: Reloading. 
Apr 02 12:19:31 managed-node1 sudo[3767]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:19:32 managed-node1 sudo[3904]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rzrhotfdjlbyajoyftuclfurzypkyfug ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610772.0870495-15869-130260789779326/AnsiballZ_systemd.py' Apr 02 12:19:32 managed-node1 sudo[3904]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:19:32 managed-node1 platform-python[3907]: ansible-systemd Invoked with name=quadlet-basic-network.service scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None user=None Apr 02 12:19:32 managed-node1 systemd[2094]: Starting quadlet-basic-network.service... -- Subject: Unit UNIT has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has begun starting up. Apr 02 12:19:32 managed-node1 quadlet-basic-network[3913]: quadlet-basic-name Apr 02 12:19:32 managed-node1 systemd[2094]: Started quadlet-basic-network.service. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:19:32 managed-node1 sudo[3904]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:19:33 managed-node1 systemd[1]: systemd-hostnamed.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit systemd-hostnamed.service has successfully entered the 'dead' state. Apr 02 12:19:33 managed-node1 platform-python[4043]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:19:34 managed-node1 platform-python[4168]: ansible-command Invoked with _raw_params=getsubids user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:19:34 managed-node1 platform-python[4292]: ansible-command Invoked with _raw_params=getsubids -g user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:19:35 managed-node1 platform-python[4416]: ansible-command Invoked with creates=/var/lib/systemd/linger/user_quadlet_basic _raw_params=loginctl enable-linger user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Apr 02 12:19:36 managed-node1 platform-python[4539]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd state=directory owner=user_quadlet_basic group=1111 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Apr 02 12:19:36 managed-node1 platform-python[4662]: ansible-stat Invoked with 
path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-network.network follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True Apr 02 12:19:37 managed-node1 platform-python[4761]: ansible-copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1743610776.6376607-16091-44256089335709/source dest=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-network.network owner=user_quadlet_basic group=1111 mode=0644 follow=False _original_basename=systemd.j2 checksum=52c9d75ecaf81203cc1f1a3b1dd00fcd25067b01 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None Apr 02 12:19:37 managed-node1 sudo[4886]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-suhupuksdyfdertmjkrxdnwxntjhbnnw ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610777.3617632-16126-262803850407316/AnsiballZ_systemd.py' Apr 02 12:19:37 managed-node1 sudo[4886]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:19:37 managed-node1 platform-python[4889]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None Apr 02 12:19:37 managed-node1 systemd[2094]: Reloading. Apr 02 12:19:37 managed-node1 sudo[4886]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:19:38 managed-node1 sudo[5023]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-phfmkjiprfrzfsznnwzispeclbzieifk ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610777.9499874-16144-252615644297635/AnsiballZ_systemd.py' Apr 02 12:19:38 managed-node1 sudo[5023]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:19:38 managed-node1 platform-python[5026]: ansible-systemd Invoked with name=quadlet-basic-unused-network-network.service scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None user=None Apr 02 12:19:38 managed-node1 systemd[2094]: Starting quadlet-basic-unused-network-network.service... -- Subject: Unit UNIT has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has begun starting up. Apr 02 12:19:38 managed-node1 quadlet-basic-unused-network-network[5032]: systemd-quadlet-basic-unused-network Apr 02 12:19:38 managed-node1 systemd[2094]: Started quadlet-basic-unused-network-network.service. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. 
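By this point two .network quadlet units have been written to ~/.config/containers/systemd/ and activated through the generated services quadlet-basic-network.service and quadlet-basic-unused-network-network.service; the services print the resulting network names, quadlet-basic-name and systemd-quadlet-basic-unused-network (the latter being quadlet's default systemd-<unit> naming). The role actually renders the file from a template (_original_basename=systemd.j2); inline content is used below only to keep the sketch self-contained, and the unit body is inferred solely from the logged network name:

- name: Install the quadlet network unit for the rootless user (sketch)
  copy:
    dest: /home/user_quadlet_basic/.config/containers/systemd/quadlet-basic.network
    owner: user_quadlet_basic
    group: "1111"
    mode: "0644"
    # Assumed unit body: NetworkName= is the quadlet option that would produce
    # the "quadlet-basic-name" network printed by the service; other options unknown.
    content: |
      [Network]
      NetworkName=quadlet-basic-name

- name: Reload the user manager so quadlet regenerates its services
  systemd:
    daemon_reload: true
    scope: user
  become: true
  become_user: user_quadlet_basic
  environment:
    XDG_RUNTIME_DIR: /run/user/1111

- name: Start the generated network service
  systemd:
    name: quadlet-basic-network.service
    scope: user
    state: started
  become: true
  become_user: user_quadlet_basic
  environment:
    XDG_RUNTIME_DIR: /run/user/1111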
Apr 02 12:19:38 managed-node1 sudo[5023]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:19:39 managed-node1 platform-python[5185]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:19:39 managed-node1 platform-python[5310]: ansible-command Invoked with _raw_params=getsubids user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:19:40 managed-node1 platform-python[5434]: ansible-command Invoked with _raw_params=getsubids -g user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:19:42 managed-node1 platform-python[5558]: ansible-command Invoked with creates=/var/lib/systemd/linger/user_quadlet_basic _raw_params=loginctl enable-linger user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Apr 02 12:19:42 managed-node1 platform-python[5681]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd state=directory owner=user_quadlet_basic group=1111 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Apr 02 12:19:43 managed-node1 platform-python[5804]: ansible-stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.volume follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True Apr 02 12:19:43 managed-node1 platform-python[5903]: ansible-copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1743610782.852917-16386-47120096876272/source dest=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.volume owner=user_quadlet_basic group=1111 mode=0644 follow=False _original_basename=systemd.j2 checksum=90a3571bfc7670328fe3f8fb625585613dbd9c4a backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None Apr 02 12:19:43 managed-node1 sudo[6028]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ljogdmcuqprxzxuiuozjdhaqsdhahckz ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610783.5670745-16407-260780952970185/AnsiballZ_systemd.py' Apr 02 12:19:43 managed-node1 sudo[6028]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:19:43 managed-node1 platform-python[6031]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None Apr 02 12:19:43 managed-node1 systemd[2094]: Reloading. 
Apr 02 12:19:43 managed-node1 sudo[6028]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:19:44 managed-node1 sudo[6165]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-puckajgawqrjkkkpecqywewerojvnthb ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610784.1022422-16425-81894877590039/AnsiballZ_systemd.py' Apr 02 12:19:44 managed-node1 sudo[6165]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:19:44 managed-node1 platform-python[6168]: ansible-systemd Invoked with name=quadlet-basic-mysql-volume.service scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None user=None Apr 02 12:19:44 managed-node1 systemd[2094]: Starting quadlet-basic-mysql-volume.service... -- Subject: Unit UNIT has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has begun starting up. Apr 02 12:19:44 managed-node1 quadlet-basic-mysql-volume[6174]: quadlet-basic-mysql-name Apr 02 12:19:44 managed-node1 systemd[2094]: Started quadlet-basic-mysql-volume.service. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:19:44 managed-node1 sudo[6165]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:19:45 managed-node1 platform-python[6302]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:19:45 managed-node1 platform-python[6427]: ansible-command Invoked with _raw_params=getsubids user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:19:46 managed-node1 platform-python[6551]: ansible-command Invoked with _raw_params=getsubids -g user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:19:47 managed-node1 platform-python[6675]: ansible-command Invoked with creates=/var/lib/systemd/linger/user_quadlet_basic _raw_params=loginctl enable-linger user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Apr 02 12:19:48 managed-node1 platform-python[6798]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd state=directory owner=user_quadlet_basic group=1111 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Apr 02 12:19:48 managed-node1 platform-python[6921]: ansible-stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-volume.volume follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True Apr 02 12:19:49 managed-node1 platform-python[7020]: ansible-copy Invoked with 
src=/root/.ansible/tmp/ansible-tmp-1743610788.5443847-16647-92259202002266/source dest=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-volume.volume owner=user_quadlet_basic group=1111 mode=0644 follow=False _original_basename=systemd.j2 checksum=fd0ae560360afa5541b866560b1e849d25e216ef backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None Apr 02 12:19:49 managed-node1 sudo[7145]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jovgvhmokpkjhqpxzprpasfuoamrizuo ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610789.2776318-16676-209488902726583/AnsiballZ_systemd.py' Apr 02 12:19:49 managed-node1 sudo[7145]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:19:49 managed-node1 platform-python[7148]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None Apr 02 12:19:49 managed-node1 systemd[2094]: Reloading. Apr 02 12:19:49 managed-node1 sudo[7145]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:19:49 managed-node1 sudo[7282]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-usujrgvlkeqrbsltgfmetvfjwktlkwqq ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610789.8420897-16702-62719613914028/AnsiballZ_systemd.py' Apr 02 12:19:49 managed-node1 sudo[7282]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:19:50 managed-node1 platform-python[7285]: ansible-systemd Invoked with name=quadlet-basic-unused-volume-volume.service scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None user=None Apr 02 12:19:50 managed-node1 systemd[2094]: Starting quadlet-basic-unused-volume-volume.service... -- Subject: Unit UNIT has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has begun starting up. Apr 02 12:19:50 managed-node1 quadlet-basic-unused-volume-volume[7291]: systemd-quadlet-basic-unused-volume Apr 02 12:19:50 managed-node1 systemd[2094]: Started quadlet-basic-unused-volume-volume.service. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. 
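The same copy / daemon-reload / start cycle repeats for the volume units: quadlet-basic-mysql.volume yields quadlet-basic-mysql-volume.service, which prints the volume name quadlet-basic-mysql-name, while the unused volume falls back to the default systemd-quadlet-basic-unused-volume name. Assuming current quadlet syntax, the named volume's unit could be as small as the following; the VolumeName= line is inferred from the logged name, not shown in the log:

- name: Install the named mysql volume quadlet unit (sketch)
  copy:
    dest: /home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.volume
    owner: user_quadlet_basic
    group: "1111"
    mode: "0644"
    # VolumeName= inferred from the "quadlet-basic-mysql-name" the service printed;
    # the real unit may set more options.
    content: |
      [Volume]
      VolumeName=quadlet-basic-mysql-name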
Apr 02 12:19:50 managed-node1 sudo[7282]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:19:51 managed-node1 platform-python[7419]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:19:51 managed-node1 platform-python[7544]: ansible-command Invoked with _raw_params=getsubids user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:19:52 managed-node1 platform-python[7668]: ansible-command Invoked with _raw_params=getsubids -g user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:19:53 managed-node1 platform-python[7792]: ansible-command Invoked with creates=/var/lib/systemd/linger/user_quadlet_basic _raw_params=loginctl enable-linger user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Apr 02 12:19:54 managed-node1 sudo[7915]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-iypjygvzkyflnibhqorfneypkkjjnote ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610793.9685776-16873-176012171075012/AnsiballZ_podman_image.py' Apr 02 12:19:54 managed-node1 sudo[7915]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:19:54 managed-node1 systemd[2094]: Started podman-7920.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:19:54 managed-node1 systemd[2094]: Started podman-7927.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:20:00 managed-node1 systemd[2094]: Started podman-8003.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. 
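The sudo entry at 12:19:54 runs AnsiballZ_podman_image.py as the rootless user, and the podman-*.scope units that follow (one spanning several seconds) are consistent with the role ensuring the container image is present before the container unit starts. A hedged sketch, assuming the containers.podman.podman_image module; the image reference is a placeholder because it does not appear in this excerpt:

- name: Ensure the container image is present for the rootless user (sketch)
  containers.podman.podman_image:
    name: docker.io/library/mysql  # hypothetical reference; the actual image is not in this log
    state: present
  become: true
  become_user: user_quadlet_basic
  environment:
    XDG_RUNTIME_DIR: /run/user/1111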
Apr 02 12:20:00 managed-node1 sudo[7915]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:20:01 managed-node1 platform-python[8131]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd state=directory owner=user_quadlet_basic group=1111 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Apr 02 12:20:01 managed-node1 platform-python[8254]: ansible-stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.container follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True Apr 02 12:20:02 managed-node1 platform-python[8353]: ansible-copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1743610801.5122783-17111-32695399448437/source dest=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.container owner=user_quadlet_basic group=1111 mode=0644 follow=False _original_basename=systemd.j2 checksum=0b6cac7929623f1059e78ef39b8b0a25169b28a6 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None Apr 02 12:20:02 managed-node1 sudo[8478]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-kvyljiymcedaqaxspdvpuxctpxsotwwd ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610802.2799761-17149-122371509001018/AnsiballZ_systemd.py' Apr 02 12:20:02 managed-node1 sudo[8478]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:20:02 managed-node1 platform-python[8481]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None Apr 02 12:20:02 managed-node1 systemd[2094]: Reloading. Apr 02 12:20:02 managed-node1 sudo[8478]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:20:02 managed-node1 sudo[8615]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-yyhirievjzsnqeiuuqiofakapiwnjkfj ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610802.8220572-17172-173866511689840/AnsiballZ_systemd.py' Apr 02 12:20:02 managed-node1 sudo[8615]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:20:03 managed-node1 platform-python[8618]: ansible-systemd Invoked with name=quadlet-basic-mysql.service scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None user=None Apr 02 12:20:03 managed-node1 systemd[2094]: Starting quadlet-basic-mysql.service... -- Subject: Unit UNIT has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has begun starting up. Apr 02 12:20:03 managed-node1 kernel: tun: Universal TUN/TAP device driver, 1.6 Apr 02 12:20:03 managed-node1 systemd[2094]: Started rootless-netns-32c1123d.scope. 
-- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:20:03 managed-node1 kernel: bridge: filtering via arp/ip/ip6tables is no longer available by default. Update your scripts to load br_netfilter if you need this. Apr 02 12:20:03 managed-node1 kernel: IPv6: ADDRCONF(NETDEV_UP): veth88850b51: link is not ready Apr 02 12:20:03 managed-node1 kernel: cni-podman1: port 1(veth88850b51) entered blocking state Apr 02 12:20:03 managed-node1 kernel: cni-podman1: port 1(veth88850b51) entered disabled state Apr 02 12:20:03 managed-node1 kernel: device veth88850b51 entered promiscuous mode Apr 02 12:20:03 managed-node1 kernel: cni-podman1: port 1(veth88850b51) entered blocking state Apr 02 12:20:03 managed-node1 kernel: cni-podman1: port 1(veth88850b51) entered forwarding state Apr 02 12:20:03 managed-node1 kernel: cni-podman1: port 1(veth88850b51) entered disabled state Apr 02 12:20:03 managed-node1 kernel: IPv6: ADDRCONF(NETDEV_UP): eth0: link is not ready Apr 02 12:20:03 managed-node1 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): eth0: link becomes ready Apr 02 12:20:03 managed-node1 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): veth88850b51: link becomes ready Apr 02 12:20:03 managed-node1 kernel: cni-podman1: port 1(veth88850b51) entered blocking state Apr 02 12:20:03 managed-node1 kernel: cni-podman1: port 1(veth88850b51) entered forwarding state Apr 02 12:20:03 managed-node1 dnsmasq[8782]: listening on cni-podman1(#3): 192.168.29.1 Apr 02 12:20:03 managed-node1 dnsmasq[8784]: started, version 2.79 cachesize 150 Apr 02 12:20:03 managed-node1 dnsmasq[8784]: compile time options: IPv6 GNU-getopt DBus no-i18n IDN2 DHCP DHCPv6 no-Lua TFTP no-conntrack ipset auth DNSSEC loop-detect inotify Apr 02 12:20:03 managed-node1 dnsmasq[8784]: using local addresses only for domain dns.podman Apr 02 12:20:03 managed-node1 dnsmasq[8784]: reading /etc/resolv.conf Apr 02 12:20:03 managed-node1 dnsmasq[8784]: using local addresses only for domain dns.podman Apr 02 12:20:03 managed-node1 dnsmasq[8784]: using nameserver 10.0.2.3#53 Apr 02 12:20:03 managed-node1 dnsmasq[8784]: using nameserver 10.29.169.13#53 Apr 02 12:20:03 managed-node1 dnsmasq[8784]: using nameserver 10.29.170.12#53 Apr 02 12:20:03 managed-node1 dnsmasq[8784]: using nameserver 10.2.32.1#53 Apr 02 12:20:03 managed-node1 dnsmasq[8784]: read /run/user/1111/containers/cni/dnsname/quadlet-basic-name/addnhosts - 1 addresses Apr 02 12:20:03 managed-node1 systemd[2094]: Started quadlet-basic-mysql.service. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. 
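quadlet-basic-mysql.container has now been copied in and its generated quadlet-basic-mysql.service started; the rootless-netns scope, the veth/cni-podman1 kernel messages, and the dnsname/dnsmasq data under .../quadlet-basic-name/ show the container being attached to the quadlet-basic-name network. A sketch of what the container unit plausibly contains; only the unit, service, and network names are taken from the log, everything else (image, mount point, secret wiring) is an assumption:

- name: Install the mysql container quadlet unit (sketch)
  copy:
    dest: /home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.container
    owner: user_quadlet_basic
    group: "1111"
    mode: "0644"
    content: |
      [Container]
      # Image, mount path, and secret usage are assumptions; the network and
      # volume unit references match the quadlet files deployed earlier.
      Image=docker.io/library/mysql
      Network=quadlet-basic.network
      Volume=quadlet-basic-mysql.volume:/var/lib/mysql
      Secret=mysql-root-password,type=env,target=MYSQL_ROOT_PASSWORD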
Apr 02 12:20:03 managed-node1 quadlet-basic-mysql[8624]: 8d09adcd720873503dcb49963dfd744e68864a0e6bf9754fdbbf0b6c2202978c Apr 02 12:20:03 managed-node1 sudo[8615]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:20:04 managed-node1 platform-python[8976]: ansible-command Invoked with _raw_params=cat /home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.container warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:20:04 managed-node1 platform-python[9100]: ansible-command Invoked with _raw_params=cat /home/user_quadlet_basic/.config/containers/systemd/quadlet-basic.network warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:20:05 managed-node1 platform-python[9235]: ansible-command Invoked with _raw_params=cat /home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.volume warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:20:05 managed-node1 dnsmasq[8784]: listening on cni-podman1(#3): fe80::a0dd:35ff:fe64:15e9%cni-podman1 Apr 02 12:20:05 managed-node1 platform-python[9359]: ansible-stat Invoked with path=/var/lib/systemd/linger/user_quadlet_basic follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:20:08 managed-node1 platform-python[9634]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:20:09 managed-node1 platform-python[9791]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:20:10 managed-node1 platform-python[9925]: ansible-command Invoked with _raw_params=getsubids user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:20:11 managed-node1 platform-python[10049]: ansible-command Invoked with _raw_params=getsubids -g user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:20:14 managed-node1 platform-python[10203]: ansible-stat Invoked with path=/run/user/1111 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:20:14 managed-node1 sudo[10328]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-funlfdvaygripnvlsbraadlfvfkldpee ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610814.3065672-17683-239621837864015/AnsiballZ_podman_secret.py' Apr 02 12:20:14 managed-node1 sudo[10328]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:20:14 managed-node1 systemd[2094]: Started podman-10333.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. 
Apr 02 12:20:14 managed-node1 sudo[10328]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:20:15 managed-node1 chronyd[587]: Selected source 5.161.196.15 (2.centos.pool.ntp.org) Apr 02 12:20:16 managed-node1 platform-python[10462]: ansible-stat Invoked with path=/run/user/1111 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:20:16 managed-node1 sudo[10587]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ltcctflhtodqehxbaqlyntwojwplygte ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610816.5145037-17794-8709007803636/AnsiballZ_podman_secret.py' Apr 02 12:20:16 managed-node1 sudo[10587]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:20:16 managed-node1 systemd[2094]: Started podman-10592.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:20:16 managed-node1 sudo[10587]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:20:17 managed-node1 platform-python[10720]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:20:18 managed-node1 platform-python[10845]: ansible-command Invoked with _raw_params=getsubids user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:20:18 managed-node1 platform-python[10969]: ansible-command Invoked with _raw_params=getsubids -g user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:20:20 managed-node1 platform-python[11093]: ansible-stat Invoked with path=/run/user/1111 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:20:20 managed-node1 sudo[11218]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mabxigelhqqdnvwmonsyguhwjmxgnuva ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610820.1947865-17967-170183610226848/AnsiballZ_systemd.py' Apr 02 12:20:20 managed-node1 sudo[11218]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:20:20 managed-node1 platform-python[11221]: ansible-systemd Invoked with name=quadlet-basic-mysql.service scope=user state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None user=None Apr 02 12:20:20 managed-node1 systemd[2094]: Reloading. Apr 02 12:20:20 managed-node1 systemd[2094]: Stopping quadlet-basic-mysql.service... -- Subject: Unit UNIT has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has begun shutting down. 
Apr 02 12:20:22 managed-node1 kernel: cni-podman1: port 1(veth88850b51) entered disabled state Apr 02 12:20:22 managed-node1 kernel: device veth88850b51 left promiscuous mode Apr 02 12:20:22 managed-node1 kernel: cni-podman1: port 1(veth88850b51) entered disabled state Apr 02 12:20:22 managed-node1 quadlet-basic-mysql[11237]: 8d09adcd720873503dcb49963dfd744e68864a0e6bf9754fdbbf0b6c2202978c Apr 02 12:20:22 managed-node1 systemd[2094]: Stopped quadlet-basic-mysql.service. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Apr 02 12:20:22 managed-node1 sudo[11218]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:20:23 managed-node1 platform-python[11484]: ansible-stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.container follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:20:24 managed-node1 platform-python[11732]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.container state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Apr 02 12:20:24 managed-node1 sudo[11855]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-slwgudlgtjvmaxyhlcyftugofkfmukzo ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610824.3035743-18096-181648561854345/AnsiballZ_systemd.py' Apr 02 12:20:24 managed-node1 sudo[11855]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:20:24 managed-node1 platform-python[11858]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None Apr 02 12:20:24 managed-node1 systemd[2094]: Reloading. Apr 02 12:20:24 managed-node1 sudo[11855]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:20:25 managed-node1 sudo[11992]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-aftanpapsnpigtwvwvxmyjrhxbahzvhv ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610824.9232528-18122-49806088322106/AnsiballZ_command.py' Apr 02 12:20:25 managed-node1 sudo[11992]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:20:25 managed-node1 systemd[2094]: Started podman-11997.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. 
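The teardown that starts at 12:20:20 mirrors the deployment: the generated service is stopped and disabled with force, the quadlet file is removed from ~/.config/containers/systemd, and the user manager is reloaded so the unit disappears. A sketch of that sequence for the mysql container, using only values shown in the log:

- name: Stop and disable the generated quadlet service
  systemd:
    name: quadlet-basic-mysql.service
    scope: user
    state: stopped
    enabled: false
    force: true
  become: true
  become_user: user_quadlet_basic
  environment:
    XDG_RUNTIME_DIR: /run/user/1111

- name: Remove the quadlet unit file
  file:
    path: /home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.container
    state: absent

- name: Reload the user manager to drop the generated unit
  systemd:
    daemon_reload: true
    scope: user
  become: true
  become_user: user_quadlet_basic
  environment:
    XDG_RUNTIME_DIR: /run/user/1111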
Apr 02 12:20:25 managed-node1 sudo[11992]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:20:25 managed-node1 sudo[12125]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lezjmiqowwpfzotlatmtkttmzvrrclvt ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610825.5794458-18154-1160350767958/AnsiballZ_command.py' Apr 02 12:20:25 managed-node1 sudo[12125]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:20:25 managed-node1 platform-python[12128]: ansible-command Invoked with _raw_params=podman image prune --all -f warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:20:25 managed-node1 systemd[2094]: Started podman-12130.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:20:26 managed-node1 sudo[12125]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:20:26 managed-node1 sudo[12258]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tiyeequfsfpnafvucqsiiawhgrpyjyer ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610826.6186526-18196-236822343795742/AnsiballZ_command.py' Apr 02 12:20:26 managed-node1 sudo[12258]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:20:26 managed-node1 platform-python[12261]: ansible-command Invoked with _raw_params=podman images -n warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:20:26 managed-node1 systemd[2094]: Started podman-12263.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:20:26 managed-node1 sudo[12258]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:20:27 managed-node1 sudo[12391]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-sclovjfxnkwarlovcaxweinudjepcumm ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610827.0922542-18216-265934429043997/AnsiballZ_command.py' Apr 02 12:20:27 managed-node1 sudo[12391]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:20:27 managed-node1 platform-python[12394]: ansible-command Invoked with _raw_params=podman volume ls -n warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:20:27 managed-node1 systemd[2094]: Started podman-12396.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. 
Apr 02 12:20:27 managed-node1 sudo[12391]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:20:27 managed-node1 sudo[12524]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-eofxbazybslmbmcuynjvamdydtckzage ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610827.5954287-18239-259366008006252/AnsiballZ_command.py' Apr 02 12:20:27 managed-node1 sudo[12524]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:20:27 managed-node1 platform-python[12527]: ansible-command Invoked with _raw_params=podman ps --noheading warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:20:27 managed-node1 systemd[2094]: Started podman-12529.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:20:27 managed-node1 sudo[12524]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:20:28 managed-node1 sudo[12657]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-uqdeiewzpdxdobcelenkzjtrzsygncgo ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610828.0860744-18267-281264743329458/AnsiballZ_command.py' Apr 02 12:20:28 managed-node1 sudo[12657]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:20:28 managed-node1 platform-python[12660]: ansible-command Invoked with _raw_params=podman network ls -n -q warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:20:28 managed-node1 systemd[2094]: Started podman-12662.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:20:28 managed-node1 sudo[12657]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:20:28 managed-node1 sudo[12841]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-znuhknartkiwsvrtftduevckoknkgunf ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610828.6076722-18289-266555950715288/AnsiballZ_command.py' Apr 02 12:20:28 managed-node1 sudo[12841]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:20:28 managed-node1 systemd[2094]: Started podman-12846.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. 
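After each removal the role runs a series of rootless podman queries as user_quadlet_basic - an image prune followed by listings of images, volumes, containers, and networks - to verify nothing is left behind. Roughly, as tasks (the loop is an editorial convenience; the log shows separate command invocations):

- name: Prune unused images for the rootless user
  command: podman image prune --all -f
  become: true
  become_user: user_quadlet_basic
  environment:
    XDG_RUNTIME_DIR: /run/user/1111

- name: Collect what is left in the rootless podman instance
  command: "{{ item }}"
  loop:
    - podman images -n
    - podman volume ls -n
    - podman ps --noheading
    - podman network ls -n -q
  become: true
  become_user: user_quadlet_basic
  environment:
    XDG_RUNTIME_DIR: /run/user/1111
  register: podman_listings
  changed_when: false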
Apr 02 12:20:28 managed-node1 sudo[12841]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:20:29 managed-node1 sudo[12974]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-pwbkmzbdicesyyejbwiikjucmbmtvamx ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610829.1075861-18318-101809740457125/AnsiballZ_command.py' Apr 02 12:20:29 managed-node1 sudo[12974]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:20:29 managed-node1 systemd[2094]: Started podman-12979.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:20:29 managed-node1 sudo[12974]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:20:29 managed-node1 sudo[13107]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tgajrcjuwtzdgdgkpdgpgbdvrpyabwjy ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610829.597791-18336-18767587249218/AnsiballZ_service_facts.py' Apr 02 12:20:29 managed-node1 sudo[13107]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:20:30 managed-node1 platform-python[13110]: ansible-service_facts Invoked Apr 02 12:20:31 managed-node1 sudo[13107]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:20:32 managed-node1 platform-python[13323]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:20:33 managed-node1 platform-python[13448]: ansible-command Invoked with _raw_params=getsubids user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:20:33 managed-node1 platform-python[13572]: ansible-command Invoked with _raw_params=getsubids -g user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:20:34 managed-node1 platform-python[13696]: ansible-stat Invoked with path=/run/user/1111 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:20:35 managed-node1 sudo[13821]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-obusvotjzbirwgbkuuxicrciuywmewym ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610834.946328-18571-21175555761533/AnsiballZ_systemd.py' Apr 02 12:20:35 managed-node1 sudo[13821]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:20:35 managed-node1 platform-python[13824]: ansible-systemd Invoked with name=quadlet-basic-unused-volume-volume.service scope=user state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None user=None Apr 02 12:20:35 managed-node1 systemd[2094]: Reloading. Apr 02 12:20:35 managed-node1 systemd[2094]: Stopped quadlet-basic-unused-volume-volume.service. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. 
Apr 02 12:20:35 managed-node1 sudo[13821]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:20:35 managed-node1 platform-python[13961]: ansible-stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-volume.volume follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:20:36 managed-node1 platform-python[14209]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-volume.volume state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Apr 02 12:20:37 managed-node1 sudo[14332]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ewmnhhrstkqpkrmtntadmghghmikicgx ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610837.1220455-18677-81960673403098/AnsiballZ_systemd.py' Apr 02 12:20:37 managed-node1 sudo[14332]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:20:37 managed-node1 platform-python[14335]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None Apr 02 12:20:37 managed-node1 systemd[2094]: Reloading. Apr 02 12:20:37 managed-node1 sudo[14332]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:20:37 managed-node1 sudo[14469]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ewaqlgviqdpaqfxqylnzmcplctdjjjqt ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610837.731338-18698-50325849730423/AnsiballZ_command.py' Apr 02 12:20:37 managed-node1 sudo[14469]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:20:38 managed-node1 systemd[2094]: Started podman-14474.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:20:38 managed-node1 sudo[14469]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:20:38 managed-node1 sudo[14603]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tqozbkbsfqvmzrmtttksmdotodtomfal ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610838.3074484-18725-51260005599908/AnsiballZ_command.py' Apr 02 12:20:38 managed-node1 sudo[14603]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:20:38 managed-node1 platform-python[14606]: ansible-command Invoked with _raw_params=podman image prune --all -f warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:20:38 managed-node1 systemd[2094]: Started podman-14608.scope. 
-- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:20:38 managed-node1 sudo[14603]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:20:39 managed-node1 sudo[14736]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rkhnezjrqfbciiqxbgkocphfucxgdyrh ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610839.0818572-18766-228381899175929/AnsiballZ_command.py' Apr 02 12:20:39 managed-node1 sudo[14736]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:20:39 managed-node1 platform-python[14739]: ansible-command Invoked with _raw_params=podman images -n warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:20:39 managed-node1 systemd[2094]: Started podman-14741.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:20:39 managed-node1 sudo[14736]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:20:39 managed-node1 sudo[14869]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xlakwetwtlvkxaltoypccgnvkmeocdoh ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610839.5645545-18782-198249633338053/AnsiballZ_command.py' Apr 02 12:20:39 managed-node1 sudo[14869]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:20:39 managed-node1 platform-python[14872]: ansible-command Invoked with _raw_params=podman volume ls -n warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:20:39 managed-node1 systemd[2094]: Started podman-14874.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:20:39 managed-node1 sudo[14869]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:20:40 managed-node1 sudo[15003]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-pihrbeirakexauhswpoaxuctxxqtqzos ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610840.1006324-18809-146516880666736/AnsiballZ_command.py' Apr 02 12:20:40 managed-node1 sudo[15003]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:20:40 managed-node1 platform-python[15006]: ansible-command Invoked with _raw_params=podman ps --noheading warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:20:40 managed-node1 systemd[2094]: Started podman-15008.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. 
Apr 02 12:20:40 managed-node1 sudo[15003]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:20:40 managed-node1 sudo[15136]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vbvctrqmawagixkkacqejyaabmngoxwp ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610840.587814-18833-59437032210130/AnsiballZ_command.py' Apr 02 12:20:40 managed-node1 sudo[15136]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:20:40 managed-node1 platform-python[15139]: ansible-command Invoked with _raw_params=podman network ls -n -q warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:20:40 managed-node1 systemd[2094]: Started podman-15141.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:20:40 managed-node1 sudo[15136]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:20:41 managed-node1 sudo[15320]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-usowrkroxngkkvfcvlwltwqqdvrdvpex ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610841.1005275-18854-219136101458965/AnsiballZ_command.py' Apr 02 12:20:41 managed-node1 sudo[15320]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:20:41 managed-node1 systemd[2094]: Started podman-15325.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:20:41 managed-node1 sudo[15320]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:20:41 managed-node1 sudo[15453]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-thiyuccapeqjfbyipozkanvfjyjawoyn ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610841.6115565-18881-142148621405390/AnsiballZ_command.py' Apr 02 12:20:41 managed-node1 sudo[15453]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:20:41 managed-node1 systemd[2094]: Started podman-15458.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. 
Apr 02 12:20:41 managed-node1 sudo[15453]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:20:42 managed-node1 sudo[15586]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mzithdgtjyjjwdyphoptcnepghajrjxc ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610842.0831704-18901-258794156665539/AnsiballZ_service_facts.py' Apr 02 12:20:42 managed-node1 sudo[15586]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:20:42 managed-node1 platform-python[15589]: ansible-service_facts Invoked Apr 02 12:20:43 managed-node1 sudo[15586]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:20:44 managed-node1 platform-python[15802]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:20:45 managed-node1 platform-python[15927]: ansible-command Invoked with _raw_params=getsubids user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:20:45 managed-node1 platform-python[16051]: ansible-command Invoked with _raw_params=getsubids -g user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:20:47 managed-node1 platform-python[16175]: ansible-stat Invoked with path=/run/user/1111 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:20:47 managed-node1 sudo[16300]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ezekutrrchhyfnpgwnhbbclfwvlslyyp ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610847.2342274-19122-90512736301404/AnsiballZ_systemd.py' Apr 02 12:20:47 managed-node1 sudo[16300]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:20:47 managed-node1 platform-python[16303]: ansible-systemd Invoked with name=quadlet-basic-mysql-volume.service scope=user state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None user=None Apr 02 12:20:47 managed-node1 systemd[2094]: Reloading. Apr 02 12:20:47 managed-node1 systemd[2094]: Stopped quadlet-basic-mysql-volume.service. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. 
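After each unit is stopped, the journal shows the quadlet source file being removed from the user's ~/.config/containers/systemd directory and the user manager being reloaded so the generated unit disappears (the ansible-file state=absent and ansible-systemd daemon_reload=True scope=user entries, seen above for the unused volume and repeated below for quadlet-basic-mysql.volume). A sketch of that pair of steps, with illustrative values copied from the log:

# Sketch only: drop the rootless quadlet source file and reload the user manager.
- name: Remove the quadlet volume spec for the rootless user (illustrative)
  file:
    path: /home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.volume
    state: absent

- name: Reload the user systemd instance so the generated unit goes away (illustrative)
  become: true
  become_user: user_quadlet_basic
  environment:
    XDG_RUNTIME_DIR: /run/user/1111
  systemd:
    scope: user
    daemon_reload: true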
Apr 02 12:20:47 managed-node1 sudo[16300]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:20:48 managed-node1 platform-python[16440]: ansible-stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.volume follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:20:49 managed-node1 platform-python[16688]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.volume state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Apr 02 12:20:49 managed-node1 sudo[16811]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ohojyigkmzoaxrcupuaugxzgibuhytsr ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610849.4066012-19355-134138052139495/AnsiballZ_systemd.py' Apr 02 12:20:49 managed-node1 sudo[16811]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:20:49 managed-node1 platform-python[16814]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None Apr 02 12:20:49 managed-node1 systemd[2094]: Reloading. Apr 02 12:20:49 managed-node1 sudo[16811]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:20:50 managed-node1 sudo[16948]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-fdpnasumsnoaxifumghdztlvhqhnlgmi ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610849.9597473-19370-91140335855834/AnsiballZ_command.py' Apr 02 12:20:50 managed-node1 sudo[16948]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:20:50 managed-node1 systemd[2094]: Started podman-16953.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:20:50 managed-node1 sudo[16948]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:20:50 managed-node1 sudo[17081]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ttcuuxstaloxgcccwjpbvdqqaroeejfn ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610850.6167877-19404-223159048442299/AnsiballZ_command.py' Apr 02 12:20:50 managed-node1 sudo[17081]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:20:50 managed-node1 platform-python[17084]: ansible-command Invoked with _raw_params=podman image prune --all -f warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:20:50 managed-node1 systemd[2094]: Started podman-17086.scope. 
-- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:20:50 managed-node1 sudo[17081]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:20:51 managed-node1 sudo[17214]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qewcrvsbpfswcohnulgqcwuhqqvabxjk ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610851.345593-19435-200414060941575/AnsiballZ_command.py' Apr 02 12:20:51 managed-node1 sudo[17214]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:20:51 managed-node1 platform-python[17217]: ansible-command Invoked with _raw_params=podman images -n warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:20:51 managed-node1 systemd[2094]: Started podman-17219.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:20:51 managed-node1 sudo[17214]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:20:51 managed-node1 sudo[17347]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-fgbtmyfkfhtdlbqkrffqccumtlqbrddm ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610851.7853332-19449-13984936675539/AnsiballZ_command.py' Apr 02 12:20:51 managed-node1 sudo[17347]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:20:52 managed-node1 platform-python[17350]: ansible-command Invoked with _raw_params=podman volume ls -n warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:20:52 managed-node1 systemd[2094]: Started podman-17352.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:20:52 managed-node1 sudo[17347]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:20:52 managed-node1 sudo[17480]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qqmjytwbforajuscrppanpdtivxvyalp ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610852.2213001-19458-69033822305415/AnsiballZ_command.py' Apr 02 12:20:52 managed-node1 sudo[17480]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:20:52 managed-node1 platform-python[17483]: ansible-command Invoked with _raw_params=podman ps --noheading warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:20:52 managed-node1 systemd[2094]: Started podman-17485.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. 
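Each removal is followed by the same housekeeping pass, run through the command module in the user's session: prune images that are no longer referenced, then list what remains. A rough sketch of those commands as tasks; the changed_when handling is an assumption added here, not taken from the log.

# Sketch only: post-removal housekeeping visible in the journal, run as the rootless user.
- name: Prune unused images for the rootless user (illustrative)
  become: true
  become_user: user_quadlet_basic
  environment:
    XDG_RUNTIME_DIR: /run/user/1111
  command: podman image prune --all -f

- name: List remaining images, volumes, containers and networks (illustrative)
  become: true
  become_user: user_quadlet_basic
  environment:
    XDG_RUNTIME_DIR: /run/user/1111
  command: "{{ item }}"
  loop:
    - podman images -n
    - podman volume ls -n
    - podman ps --noheading
    - podman network ls -n -q
  changed_when: false    # assumption: listings are read-only checks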
Apr 02 12:20:52 managed-node1 sudo[17480]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:20:52 managed-node1 sudo[17614]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-uxrjtjgiwurdvqbawmrtqwhhbixhutns ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610852.673239-19472-24238728876016/AnsiballZ_command.py' Apr 02 12:20:52 managed-node1 sudo[17614]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:20:52 managed-node1 platform-python[17617]: ansible-command Invoked with _raw_params=podman network ls -n -q warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:20:52 managed-node1 systemd[2094]: Started podman-17619.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:20:53 managed-node1 sudo[17614]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:20:53 managed-node1 sudo[17798]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-izzyidlwlekultkjouxmqkguawtvnrmt ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610853.1417243-19481-172834280887266/AnsiballZ_command.py' Apr 02 12:20:53 managed-node1 sudo[17798]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:20:53 managed-node1 systemd[2094]: Started podman-17803.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:20:53 managed-node1 sudo[17798]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:20:53 managed-node1 sudo[17931]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-sqbiiftdztdxlwzzzjujvseqqiihqavk ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610853.5756948-19495-26250434628358/AnsiballZ_command.py' Apr 02 12:20:53 managed-node1 sudo[17931]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:20:53 managed-node1 systemd[2094]: Started podman-17936.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. 
Apr 02 12:20:53 managed-node1 sudo[17931]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:20:54 managed-node1 sudo[18064]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-drkjjhshchherkfimnytzkujpcercrvg ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610854.0134041-19504-255760292905919/AnsiballZ_service_facts.py' Apr 02 12:20:54 managed-node1 sudo[18064]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:20:54 managed-node1 platform-python[18067]: ansible-service_facts Invoked Apr 02 12:20:55 managed-node1 sudo[18064]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:20:56 managed-node1 platform-python[18280]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:20:56 managed-node1 platform-python[18405]: ansible-command Invoked with _raw_params=getsubids user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:20:57 managed-node1 platform-python[18529]: ansible-command Invoked with _raw_params=getsubids -g user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:20:58 managed-node1 platform-python[18653]: ansible-stat Invoked with path=/run/user/1111 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:20:58 managed-node1 sudo[18778]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xhcqsjqvrxulwpmszlazdggkgcqwlcqt ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610858.4850607-19620-267247932011813/AnsiballZ_systemd.py' Apr 02 12:20:58 managed-node1 sudo[18778]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:20:58 managed-node1 platform-python[18781]: ansible-systemd Invoked with name=quadlet-basic-unused-network-network.service scope=user state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None user=None Apr 02 12:20:58 managed-node1 systemd[2094]: Reloading. Apr 02 12:20:58 managed-node1 systemd[2094]: Stopped quadlet-basic-unused-network-network.service. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. 
Apr 02 12:20:58 managed-node1 sudo[18778]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:20:59 managed-node1 platform-python[18918]: ansible-stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-network.network follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:21:00 managed-node1 platform-python[19166]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-network.network state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Apr 02 12:21:00 managed-node1 sudo[19289]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-elaxqawqmnxhhnrlxqxizmpsewrhfqwb ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610860.5308206-19701-177570039937950/AnsiballZ_systemd.py' Apr 02 12:21:00 managed-node1 sudo[19289]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:21:00 managed-node1 platform-python[19292]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None Apr 02 12:21:00 managed-node1 systemd[2094]: Reloading. Apr 02 12:21:00 managed-node1 sudo[19289]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:21:01 managed-node1 sudo[19426]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ggxzljobieijavuupldrrxoyfkvjacuq ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610861.10733-19718-8140458563639/AnsiballZ_command.py' Apr 02 12:21:01 managed-node1 sudo[19426]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:21:01 managed-node1 systemd[2094]: Started podman-19431.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:21:01 managed-node1 sudo[19426]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:21:01 managed-node1 sudo[19609]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bkdfczsbkjxfvjvvexoolubicefkvtzb ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610861.72709-19742-145280521386529/AnsiballZ_command.py' Apr 02 12:21:01 managed-node1 sudo[19609]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:21:01 managed-node1 platform-python[19612]: ansible-command Invoked with _raw_params=podman image prune --all -f warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:21:02 managed-node1 systemd[2094]: Started podman-19614.scope. 
-- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:21:02 managed-node1 sudo[19609]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:21:02 managed-node1 sudo[19742]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jmtxyvcczcwzfwnxhzpezoqfqbwtlfza ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610862.4313958-19768-6951061666702/AnsiballZ_command.py' Apr 02 12:21:02 managed-node1 sudo[19742]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:21:02 managed-node1 platform-python[19745]: ansible-command Invoked with _raw_params=podman images -n warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:21:02 managed-node1 systemd[2094]: Started podman-19747.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:21:02 managed-node1 sudo[19742]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:21:03 managed-node1 sudo[19875]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cuyxztdoxhyuilifgzujknhwqdbohath ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610862.8968573-19792-189283443037679/AnsiballZ_command.py' Apr 02 12:21:03 managed-node1 sudo[19875]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:21:03 managed-node1 platform-python[19878]: ansible-command Invoked with _raw_params=podman volume ls -n warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:21:03 managed-node1 systemd[2094]: Started podman-19880.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:21:03 managed-node1 sudo[19875]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:21:03 managed-node1 sudo[20009]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-xypvgubvjolzcrousocwrhkewhlbvhhg ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610863.3756895-19812-150341420754839/AnsiballZ_command.py' Apr 02 12:21:03 managed-node1 sudo[20009]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:21:03 managed-node1 platform-python[20012]: ansible-command Invoked with _raw_params=podman ps --noheading warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:21:03 managed-node1 systemd[2094]: Started podman-20014.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. 
Apr 02 12:21:03 managed-node1 sudo[20009]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:21:04 managed-node1 sudo[20142]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-nqchypojscmjzwvwwirhqsumlwpauzhl ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610863.8618302-19834-90520653667349/AnsiballZ_command.py' Apr 02 12:21:04 managed-node1 sudo[20142]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:21:04 managed-node1 platform-python[20145]: ansible-command Invoked with _raw_params=podman network ls -n -q warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:21:04 managed-node1 systemd[2094]: Started podman-20147.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:21:04 managed-node1 sudo[20142]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:21:04 managed-node1 sudo[20300]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-adqkygotakxzduxbekqgawestxdaommx ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610864.329015-19854-31133991363006/AnsiballZ_command.py' Apr 02 12:21:04 managed-node1 sudo[20300]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:21:04 managed-node1 systemd[2094]: Started podman-20305.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:21:04 managed-node1 sudo[20300]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:21:04 managed-node1 sudo[20433]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tlunbyfsigukmkzrzxnzpmhqmbmfegzn ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610864.7683825-19877-40906314206892/AnsiballZ_command.py' Apr 02 12:21:04 managed-node1 sudo[20433]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:21:05 managed-node1 systemd[2094]: Started podman-20438.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. 
Apr 02 12:21:05 managed-node1 sudo[20433]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:21:05 managed-node1 sudo[20566]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lnsscikqxvmlxvgwswqkvzkbpblpsfiy ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610865.2153862-19899-135809603983731/AnsiballZ_service_facts.py' Apr 02 12:21:05 managed-node1 sudo[20566]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:21:05 managed-node1 platform-python[20569]: ansible-service_facts Invoked Apr 02 12:21:06 managed-node1 sudo[20566]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:21:07 managed-node1 platform-python[20782]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:21:08 managed-node1 platform-python[20907]: ansible-command Invoked with _raw_params=getsubids user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:21:08 managed-node1 platform-python[21031]: ansible-command Invoked with _raw_params=getsubids -g user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:21:09 managed-node1 platform-python[21155]: ansible-stat Invoked with path=/run/user/1111 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:21:10 managed-node1 sudo[21280]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-wipfeexpexrayhjijqenauffntutsggu ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610870.14129-20099-268899250074739/AnsiballZ_systemd.py' Apr 02 12:21:10 managed-node1 sudo[21280]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:21:10 managed-node1 platform-python[21283]: ansible-systemd Invoked with name=quadlet-basic-network.service scope=user state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None user=None Apr 02 12:21:10 managed-node1 systemd[2094]: Reloading. Apr 02 12:21:10 managed-node1 systemd[2094]: Stopped quadlet-basic-network.service. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. 
Apr 02 12:21:10 managed-node1 sudo[21280]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:21:11 managed-node1 platform-python[21420]: ansible-stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic.network follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:21:12 managed-node1 platform-python[21668]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic.network state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Apr 02 12:21:12 managed-node1 sudo[21791]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-udvamhrlltmycnnyreaxpugljeyjtnjf ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610872.3462439-20215-70949079352750/AnsiballZ_systemd.py' Apr 02 12:21:12 managed-node1 sudo[21791]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:21:12 managed-node1 platform-python[21794]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None Apr 02 12:21:12 managed-node1 systemd[2094]: Reloading. Apr 02 12:21:12 managed-node1 sudo[21791]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:21:13 managed-node1 sudo[21928]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jybduphkliiceuslnzifghjsqpfqgnwe ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610872.9126391-20238-72383612437130/AnsiballZ_command.py' Apr 02 12:21:13 managed-node1 sudo[21928]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:21:13 managed-node1 systemd[2094]: Started podman-21933.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:21:13 managed-node1 sudo[21928]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:21:13 managed-node1 sudo[22086]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-haxmyqnowvhpxlbizdjqbfkikehxjibp ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610873.4708867-20251-101825760962890/AnsiballZ_command.py' Apr 02 12:21:13 managed-node1 sudo[22086]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:21:13 managed-node1 platform-python[22089]: ansible-command Invoked with _raw_params=podman image prune --all -f warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:21:13 managed-node1 systemd[2094]: Started podman-22091.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. 
-- -- The start-up result is done. Apr 02 12:21:13 managed-node1 sudo[22086]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:21:14 managed-node1 sudo[22219]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-piuolvgnjvhxyqmbrnlmzgeyrfcjrxcb ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610874.1289935-20273-143417743643243/AnsiballZ_command.py' Apr 02 12:21:14 managed-node1 sudo[22219]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:21:14 managed-node1 platform-python[22222]: ansible-command Invoked with _raw_params=podman images -n warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:21:14 managed-node1 systemd[2094]: Started podman-22224.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:21:14 managed-node1 sudo[22219]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:21:14 managed-node1 sudo[22352]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-whezzubntigvfbhqeooitemlciwawiuw ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610874.578167-20282-238409301226469/AnsiballZ_command.py' Apr 02 12:21:14 managed-node1 sudo[22352]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:21:14 managed-node1 platform-python[22355]: ansible-command Invoked with _raw_params=podman volume ls -n warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:21:14 managed-node1 systemd[2094]: Started podman-22357.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:21:14 managed-node1 sudo[22352]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:21:15 managed-node1 sudo[22485]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-pxcsyfhzxghwhfhiimlbdwfdcrqjyhog ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610875.1054816-20296-46253218532815/AnsiballZ_command.py' Apr 02 12:21:15 managed-node1 sudo[22485]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:21:15 managed-node1 platform-python[22488]: ansible-command Invoked with _raw_params=podman ps --noheading warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:21:15 managed-node1 systemd[2094]: Started podman-22490.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. 
Apr 02 12:21:15 managed-node1 sudo[22485]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:21:15 managed-node1 sudo[22618]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-bjfkayclzlilripwxmuygrjeqhxncikd ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610875.5781698-20322-147732988083033/AnsiballZ_command.py' Apr 02 12:21:15 managed-node1 sudo[22618]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:21:15 managed-node1 platform-python[22621]: ansible-command Invoked with _raw_params=podman network ls -n -q warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:21:15 managed-node1 systemd[2094]: Started podman-22623.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:21:15 managed-node1 sudo[22618]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:21:16 managed-node1 sudo[22751]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ohtzqfntfrcumqasjbauwobtvbuhbinb ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610876.0440228-20342-35862964346686/AnsiballZ_command.py' Apr 02 12:21:16 managed-node1 sudo[22751]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:21:16 managed-node1 systemd[2094]: Started podman-22756.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:21:16 managed-node1 sudo[22751]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:21:16 managed-node1 sudo[22884]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-kuxgbbrspdlqcpkvidpjibmsssasxvai ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610876.5113258-20357-252399808019449/AnsiballZ_command.py' Apr 02 12:21:16 managed-node1 sudo[22884]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:21:16 managed-node1 systemd[2094]: Started podman-22889.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. 
Apr 02 12:21:16 managed-node1 sudo[22884]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:21:17 managed-node1 sudo[23018]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mommwgviylgspiougzkrqvevkvhzteqf ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610877.0127833-20383-186542035876443/AnsiballZ_service_facts.py' Apr 02 12:21:17 managed-node1 sudo[23018]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:21:17 managed-node1 platform-python[23021]: ansible-service_facts Invoked Apr 02 12:21:18 managed-node1 sudo[23018]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:21:19 managed-node1 platform-python[23234]: ansible-stat Invoked with path=/run/user/1111 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:21:19 managed-node1 sudo[23359]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vkcvxodjgcdjcltmgpgsxkbseyakaccj ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610879.5712423-20483-267377686304657/AnsiballZ_podman_container_info.py' Apr 02 12:21:19 managed-node1 sudo[23359]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:21:20 managed-node1 platform-python[23362]: ansible-containers.podman.podman_container_info Invoked with executable=podman name=None Apr 02 12:21:20 managed-node1 systemd[2094]: Started podman-23364.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:21:20 managed-node1 sudo[23359]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:21:20 managed-node1 sudo[23492]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rhxqjxhfiuajelurnycylwtmnmwjsjhb ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610880.2457466-20515-194052659760307/AnsiballZ_command.py' Apr 02 12:21:20 managed-node1 sudo[23492]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:21:20 managed-node1 platform-python[23495]: ansible-command Invoked with _raw_params=podman network ls -q warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:21:20 managed-node1 systemd[2094]: Started podman-23497.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. 
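The final rootless verification above uses the containers.podman.podman_container_info module (invoked with executable=podman and no name filter) plus plain listings of networks and secrets. A hedged sketch of that check, again with the become/environment pattern inferred from the surrounding sudo lines:

# Sketch only: final verification that nothing is left behind for the rootless user.
- name: Gather container info for the rootless user (illustrative)
  become: true
  become_user: user_quadlet_basic
  environment:
    XDG_RUNTIME_DIR: /run/user/1111
  containers.podman.podman_container_info:
    executable: podman

- name: List networks still present for the rootless user (illustrative)
  become: true
  become_user: user_quadlet_basic
  environment:
    XDG_RUNTIME_DIR: /run/user/1111
  command: podman network ls -q
  changed_when: false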
Apr 02 12:21:20 managed-node1 sudo[23492]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:21:20 managed-node1 sudo[23625]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_basic ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-prqczsfydgphbeqbbdwzlmawrieoidff ; XDG_RUNTIME_DIR=/run/user/1111 /usr/libexec/platform-python /var/tmp/ansible-tmp-1743610880.7122793-20530-183472016067767/AnsiballZ_command.py' Apr 02 12:21:20 managed-node1 sudo[23625]: pam_unix(sudo:session): session opened for user user_quadlet_basic by root(uid=0) Apr 02 12:21:21 managed-node1 platform-python[23628]: ansible-command Invoked with _raw_params=podman secret ls -n -q warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:21:21 managed-node1 systemd[2094]: Started podman-23630.scope. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:21:21 managed-node1 sudo[23625]: pam_unix(sudo:session): session closed for user user_quadlet_basic Apr 02 12:21:21 managed-node1 platform-python[23758]: ansible-command Invoked with removes=/var/lib/systemd/linger/user_quadlet_basic _raw_params=loginctl disable-linger user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None stdin=None Apr 02 12:21:21 managed-node1 systemd[1]: Stopping User Manager for UID 1111... -- Subject: Unit user@1111.service has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user@1111.service has begun shutting down. Apr 02 12:21:21 managed-node1 systemd[2094]: Stopping podman-pause-a17f0318.scope. -- Subject: Unit UNIT has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has begun shutting down. Apr 02 12:21:21 managed-node1 systemd[2094]: Stopped target Default. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Apr 02 12:21:21 managed-node1 systemd[2094]: Stopping D-Bus User Message Bus... -- Subject: Unit UNIT has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has begun shutting down. Apr 02 12:21:21 managed-node1 systemd[2094]: Stopped D-Bus User Message Bus. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Apr 02 12:21:21 managed-node1 systemd[2094]: Stopped podman-pause-a17f0318.scope. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Apr 02 12:21:21 managed-node1 systemd[2094]: Removed slice user.slice. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Apr 02 12:21:21 managed-node1 systemd[2094]: Stopped target Basic System. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Apr 02 12:21:21 managed-node1 systemd[2094]: Stopped target Paths. 
-- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Apr 02 12:21:21 managed-node1 systemd[2094]: Stopped target Timers. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Apr 02 12:21:21 managed-node1 systemd[2094]: Stopped Mark boot as successful after the user session has run 2 minutes. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Apr 02 12:21:21 managed-node1 systemd[2094]: Stopped target Sockets. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Apr 02 12:21:21 managed-node1 systemd[2094]: Closed D-Bus User Message Bus Socket. -- Subject: Unit UNIT has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished shutting down. Apr 02 12:21:21 managed-node1 systemd[2094]: Reached target Shutdown. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:21:21 managed-node1 systemd[2094]: Started Exit the Session. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:21:21 managed-node1 systemd[2094]: Reached target Exit the Session. -- Subject: Unit UNIT has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit UNIT has finished starting up. -- -- The start-up result is done. Apr 02 12:21:21 managed-node1 systemd[1]: user@1111.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit user@1111.service has successfully entered the 'dead' state. Apr 02 12:21:21 managed-node1 systemd[1]: Stopped User Manager for UID 1111. -- Subject: Unit user@1111.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user@1111.service has finished shutting down. Apr 02 12:21:21 managed-node1 systemd[1]: Stopping User runtime directory /run/user/1111... -- Subject: Unit user-runtime-dir@1111.service has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user-runtime-dir@1111.service has begun shutting down. Apr 02 12:21:21 managed-node1 systemd[1]: run-user-1111.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit run-user-1111.mount has successfully entered the 'dead' state. Apr 02 12:21:21 managed-node1 systemd[1]: user-runtime-dir@1111.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit user-runtime-dir@1111.service has successfully entered the 'dead' state. Apr 02 12:21:21 managed-node1 systemd[1]: Stopped User runtime directory /run/user/1111. 
-- Subject: Unit user-runtime-dir@1111.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user-runtime-dir@1111.service has finished shutting down. Apr 02 12:21:21 managed-node1 systemd[1]: Removed slice User Slice of UID 1111. -- Subject: Unit user-1111.slice has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit user-1111.slice has finished shutting down. Apr 02 12:21:21 managed-node1 systemd[1]: user-1111.slice: Consumed 13.628s CPU time -- Subject: Resources consumed by unit runtime -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit user-1111.slice completed and consumed the indicated resources. Apr 02 12:21:21 managed-node1 platform-python[23883]: ansible-command Invoked with _raw_params=loginctl show-user --value -p State user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:21:22 managed-node1 platform-python[24007]: ansible-stat Invoked with path=/var/lib/systemd/linger/user_quadlet_basic follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:21:25 managed-node1 platform-python[24255]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:21:26 managed-node1 platform-python[24384]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Apr 02 12:21:27 managed-node1 platform-python[24508]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:21:31 managed-node1 systemd[1]: var-lib-containers-storage-overlay-metacopy\x2dcheck3873855117-merged.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay-metacopy\x2dcheck3873855117-merged.mount has successfully entered the 'dead' state. Apr 02 12:21:32 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Apr 02 12:21:33 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Apr 02 12:21:33 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
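With every spec removed, the journal shows lingering being cancelled for the test user, which is what lets systemd stop user@1111.service, unmount /run/user/1111 and remove user-1111.slice as logged above, followed by a loginctl state check and a stat of the linger file. The guarded command and the follow-up check look roughly like this (variable name is hypothetical):

# Sketch only: cancel lingering once no quadlet units remain for the user.
# "removes" makes the task a no-op if linger was already disabled.
- name: Disable linger for the quadlet test user (illustrative)
  command: loginctl disable-linger user_quadlet_basic
  args:
    removes: /var/lib/systemd/linger/user_quadlet_basic

- name: Check whether the user session manager is still running (illustrative)
  command: loginctl show-user --value -p State user_quadlet_basic
  register: __user_state    # hypothetical variable name
  changed_when: false
  failed_when: false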
Apr 02 12:21:34 managed-node1 platform-python[24936]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:21:37 managed-node1 platform-python[25061]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Apr 02 12:21:37 managed-node1 platform-python[25184]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-basic.network follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True Apr 02 12:21:37 managed-node1 platform-python[25283]: ansible-copy Invoked with dest=/etc/containers/systemd/quadlet-basic.network owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1743610897.2148337-21372-133279900083913/source _original_basename=tmph5t7jeic follow=False checksum=19c9b17be2af9b9deca5c3bd327f048966750682 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None Apr 02 12:21:38 managed-node1 platform-python[25408]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None Apr 02 12:21:38 managed-node1 systemd[1]: Reloading. Apr 02 12:21:38 managed-node1 systemd[1]: /etc/systemd/system/user-0.slice:4: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument Apr 02 12:21:38 managed-node1 systemd[1]: /etc/systemd/system/user-.slice.d/override.conf:2: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument Apr 02 12:21:38 managed-node1 platform-python[25561]: ansible-systemd Invoked with name=quadlet-basic-network.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None user=None Apr 02 12:21:39 managed-node1 systemd[1]: Starting quadlet-basic-network.service... -- Subject: Unit quadlet-basic-network.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit quadlet-basic-network.service has begun starting up. Apr 02 12:21:39 managed-node1 quadlet-basic-network[25568]: quadlet-basic-name Apr 02 12:21:39 managed-node1 systemd[1]: Started quadlet-basic-network.service. -- Subject: Unit quadlet-basic-network.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit quadlet-basic-network.service has finished starting up. -- -- The start-up result is done. 
Apr 02 12:21:40 managed-node1 platform-python[25696]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:21:41 managed-node1 platform-python[25821]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Apr 02 12:21:42 managed-node1 platform-python[25944]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-basic-unused-network.network follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True Apr 02 12:21:42 managed-node1 platform-python[26043]: ansible-copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1743610901.8747222-21524-125519104427091/source dest=/etc/containers/systemd/quadlet-basic-unused-network.network owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=52c9d75ecaf81203cc1f1a3b1dd00fcd25067b01 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None Apr 02 12:21:42 managed-node1 platform-python[26168]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None Apr 02 12:21:42 managed-node1 systemd[1]: Reloading. Apr 02 12:21:43 managed-node1 systemd[1]: /etc/systemd/system/user-0.slice:4: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument Apr 02 12:21:43 managed-node1 systemd[1]: /etc/systemd/system/user-.slice.d/override.conf:2: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument Apr 02 12:21:43 managed-node1 platform-python[26321]: ansible-systemd Invoked with name=quadlet-basic-unused-network-network.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None user=None Apr 02 12:21:43 managed-node1 systemd[1]: Starting quadlet-basic-unused-network-network.service... -- Subject: Unit quadlet-basic-unused-network-network.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit quadlet-basic-unused-network-network.service has begun starting up. Apr 02 12:21:43 managed-node1 quadlet-basic-unused-network-network[26328]: systemd-quadlet-basic-unused-network Apr 02 12:21:43 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Apr 02 12:21:43 managed-node1 systemd[1]: Started quadlet-basic-unused-network-network.service. -- Subject: Unit quadlet-basic-unused-network-network.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit quadlet-basic-unused-network-network.service has finished starting up. -- -- The start-up result is done. 
Apr 02 12:21:44 managed-node1 platform-python[26482]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:21:46 managed-node1 platform-python[26607]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Apr 02 12:21:46 managed-node1 platform-python[26730]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-basic-mysql.volume follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True Apr 02 12:21:47 managed-node1 platform-python[26829]: ansible-copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1743610906.7097242-21707-17809242046552/source dest=/etc/containers/systemd/quadlet-basic-mysql.volume owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=90a3571bfc7670328fe3f8fb625585613dbd9c4a backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None Apr 02 12:21:47 managed-node1 platform-python[26954]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None Apr 02 12:21:47 managed-node1 systemd[1]: Reloading. Apr 02 12:21:47 managed-node1 systemd[1]: /etc/systemd/system/user-0.slice:4: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument Apr 02 12:21:47 managed-node1 systemd[1]: /etc/systemd/system/user-.slice.d/override.conf:2: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument Apr 02 12:21:48 managed-node1 platform-python[27107]: ansible-systemd Invoked with name=quadlet-basic-mysql-volume.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None user=None Apr 02 12:21:48 managed-node1 systemd[1]: Starting quadlet-basic-mysql-volume.service... -- Subject: Unit quadlet-basic-mysql-volume.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit quadlet-basic-mysql-volume.service has begun starting up. Apr 02 12:21:48 managed-node1 quadlet-basic-mysql-volume[27114]: quadlet-basic-mysql-name Apr 02 12:21:48 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Apr 02 12:21:48 managed-node1 systemd[1]: Started quadlet-basic-mysql-volume.service. -- Subject: Unit quadlet-basic-mysql-volume.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit quadlet-basic-mysql-volume.service has finished starting up. -- -- The start-up result is done. 
Apr 02 12:21:49 managed-node1 platform-python[27242]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:21:51 managed-node1 platform-python[27367]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Apr 02 12:21:52 managed-node1 platform-python[27490]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-basic-unused-volume.volume follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True Apr 02 12:21:52 managed-node1 platform-python[27589]: ansible-copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1743610911.9956682-21960-107210692685763/source dest=/etc/containers/systemd/quadlet-basic-unused-volume.volume owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=fd0ae560360afa5541b866560b1e849d25e216ef backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None Apr 02 12:21:53 managed-node1 platform-python[27714]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None Apr 02 12:21:53 managed-node1 systemd[1]: Reloading. Apr 02 12:21:53 managed-node1 systemd[1]: /etc/systemd/system/user-0.slice:4: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument Apr 02 12:21:53 managed-node1 systemd[1]: /etc/systemd/system/user-.slice.d/override.conf:2: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument Apr 02 12:21:53 managed-node1 platform-python[27867]: ansible-systemd Invoked with name=quadlet-basic-unused-volume-volume.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None user=None Apr 02 12:21:53 managed-node1 systemd[1]: Starting quadlet-basic-unused-volume-volume.service... -- Subject: Unit quadlet-basic-unused-volume-volume.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit quadlet-basic-unused-volume-volume.service has begun starting up. Apr 02 12:21:53 managed-node1 quadlet-basic-unused-volume-volume[27874]: systemd-quadlet-basic-unused-volume Apr 02 12:21:53 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Apr 02 12:21:53 managed-node1 systemd[1]: Started quadlet-basic-unused-volume-volume.service. -- Subject: Unit quadlet-basic-unused-volume-volume.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit quadlet-basic-unused-volume-volume.service has finished starting up. -- -- The start-up result is done. 
Apr 02 12:21:55 managed-node1 platform-python[28003]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:22:04 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Apr 02 12:22:04 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Apr 02 12:22:05 managed-node1 platform-python[28342]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Apr 02 12:22:05 managed-node1 platform-python[28465]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-basic-mysql.container follow=False get_checksum=True checksum_algorithm=sha1 get_md5=False get_mime=True get_attributes=True Apr 02 12:22:06 managed-node1 platform-python[28564]: ansible-copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1743610925.566711-22356-256581358651919/source dest=/etc/containers/systemd/quadlet-basic-mysql.container owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=0b6cac7929623f1059e78ef39b8b0a25169b28a6 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None regexp=None delimiter=None Apr 02 12:22:06 managed-node1 platform-python[28689]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None Apr 02 12:22:06 managed-node1 systemd[1]: Reloading. Apr 02 12:22:06 managed-node1 systemd[1]: /etc/systemd/system/user-0.slice:4: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument Apr 02 12:22:06 managed-node1 systemd[1]: /etc/systemd/system/user-.slice.d/override.conf:2: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument Apr 02 12:22:07 managed-node1 platform-python[28842]: ansible-systemd Invoked with name=quadlet-basic-mysql.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None user=None Apr 02 12:22:07 managed-node1 systemd[1]: Starting quadlet-basic-mysql.service... -- Subject: Unit quadlet-basic-mysql.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit quadlet-basic-mysql.service has begun starting up. Apr 02 12:22:07 managed-node1 systemd-udevd[28915]: Using default interface naming scheme 'rhel-8.0'. 
Apr 02 12:22:07 managed-node1 NetworkManager[629]: [1743610927.4136] manager: (cni-podman1): new Bridge device (/org/freedesktop/NetworkManager/Devices/3) Apr 02 12:22:07 managed-node1 NetworkManager[629]: [1743610927.4146] manager: (vethbd11679c): new Veth device (/org/freedesktop/NetworkManager/Devices/4) Apr 02 12:22:07 managed-node1 kernel: IPv6: ADDRCONF(NETDEV_UP): vethbd11679c: link is not ready Apr 02 12:22:07 managed-node1 kernel: cni-podman1: port 1(vethbd11679c) entered blocking state Apr 02 12:22:07 managed-node1 kernel: cni-podman1: port 1(vethbd11679c) entered disabled state Apr 02 12:22:07 managed-node1 kernel: device vethbd11679c entered promiscuous mode Apr 02 12:22:07 managed-node1 kernel: cni-podman1: port 1(vethbd11679c) entered blocking state Apr 02 12:22:07 managed-node1 kernel: cni-podman1: port 1(vethbd11679c) entered forwarding state Apr 02 12:22:07 managed-node1 kernel: cni-podman1: port 1(vethbd11679c) entered disabled state Apr 02 12:22:07 managed-node1 systemd-udevd[28915]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable. Apr 02 12:22:07 managed-node1 systemd-udevd[28915]: Could not generate persistent MAC address for cni-podman1: No such file or directory Apr 02 12:22:07 managed-node1 systemd-udevd[28916]: link_config: autonegotiation is unset or enabled, the speed and duplex are not writable. Apr 02 12:22:07 managed-node1 systemd-udevd[28916]: Could not generate persistent MAC address for vethbd11679c: No such file or directory Apr 02 12:22:07 managed-node1 NetworkManager[629]: [1743610927.4409] device (cni-podman1): state change: unmanaged -> unavailable (reason 'connection-assumed', sys-iface-state: 'external') Apr 02 12:22:07 managed-node1 NetworkManager[629]: [1743610927.4414] device (cni-podman1): state change: unavailable -> disconnected (reason 'connection-assumed', sys-iface-state: 'external') Apr 02 12:22:07 managed-node1 NetworkManager[629]: [1743610927.4421] device (cni-podman1): Activation: starting connection 'cni-podman1' (1b6fdbee-a440-428e-8350-c02ac9474b6b) Apr 02 12:22:07 managed-node1 NetworkManager[629]: [1743610927.4422] device (cni-podman1): state change: disconnected -> prepare (reason 'none', sys-iface-state: 'external') Apr 02 12:22:07 managed-node1 NetworkManager[629]: [1743610927.4425] device (cni-podman1): state change: prepare -> config (reason 'none', sys-iface-state: 'external') Apr 02 12:22:07 managed-node1 NetworkManager[629]: [1743610927.4426] device (cni-podman1): state change: config -> ip-config (reason 'none', sys-iface-state: 'external') Apr 02 12:22:07 managed-node1 NetworkManager[629]: [1743610927.4427] device (cni-podman1): state change: ip-config -> ip-check (reason 'none', sys-iface-state: 'external') Apr 02 12:22:07 managed-node1 dbus-daemon[584]: [system] Activating via systemd: service name='org.freedesktop.nm_dispatcher' unit='dbus-org.freedesktop.nm-dispatcher.service' requested by ':1.2' (uid=0 pid=629 comm="/usr/sbin/NetworkManager --no-daemon " label="system_u:system_r:NetworkManager_t:s0") Apr 02 12:22:07 managed-node1 systemd[1]: Starting Network Manager Script Dispatcher Service... -- Subject: Unit NetworkManager-dispatcher.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit NetworkManager-dispatcher.service has begun starting up. 
Apr 02 12:22:07 managed-node1 kernel: IPv6: ADDRCONF(NETDEV_UP): eth0: link is not ready Apr 02 12:22:07 managed-node1 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): eth0: link becomes ready Apr 02 12:22:07 managed-node1 kernel: IPv6: ADDRCONF(NETDEV_CHANGE): vethbd11679c: link becomes ready Apr 02 12:22:07 managed-node1 kernel: cni-podman1: port 1(vethbd11679c) entered blocking state Apr 02 12:22:07 managed-node1 kernel: cni-podman1: port 1(vethbd11679c) entered forwarding state Apr 02 12:22:07 managed-node1 NetworkManager[629]: [1743610927.4731] device (vethbd11679c): carrier: link connected Apr 02 12:22:07 managed-node1 NetworkManager[629]: [1743610927.4733] device (cni-podman1): carrier: link connected Apr 02 12:22:07 managed-node1 dbus-daemon[584]: [system] Successfully activated service 'org.freedesktop.nm_dispatcher' Apr 02 12:22:07 managed-node1 systemd[1]: Started Network Manager Script Dispatcher Service. -- Subject: Unit NetworkManager-dispatcher.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit NetworkManager-dispatcher.service has finished starting up. -- -- The start-up result is done. Apr 02 12:22:07 managed-node1 NetworkManager[629]: [1743610927.4815] device (cni-podman1): state change: ip-check -> secondaries (reason 'none', sys-iface-state: 'external') Apr 02 12:22:07 managed-node1 NetworkManager[629]: [1743610927.4817] device (cni-podman1): state change: secondaries -> activated (reason 'none', sys-iface-state: 'external') Apr 02 12:22:07 managed-node1 NetworkManager[629]: [1743610927.4821] device (cni-podman1): Activation: successful, device activated. Apr 02 12:22:07 managed-node1 dnsmasq[29024]: listening on cni-podman1(#3): 192.168.29.1 Apr 02 12:22:07 managed-node1 dnsmasq[29028]: started, version 2.79 cachesize 150 Apr 02 12:22:07 managed-node1 dnsmasq[29028]: compile time options: IPv6 GNU-getopt DBus no-i18n IDN2 DHCP DHCPv6 no-Lua TFTP no-conntrack ipset auth DNSSEC loop-detect inotify Apr 02 12:22:07 managed-node1 dnsmasq[29028]: using local addresses only for domain dns.podman Apr 02 12:22:07 managed-node1 dnsmasq[29028]: reading /etc/resolv.conf Apr 02 12:22:07 managed-node1 dnsmasq[29028]: using local addresses only for domain dns.podman Apr 02 12:22:07 managed-node1 dnsmasq[29028]: using nameserver 10.29.169.13#53 Apr 02 12:22:07 managed-node1 dnsmasq[29028]: using nameserver 10.29.170.12#53 Apr 02 12:22:07 managed-node1 dnsmasq[29028]: using nameserver 10.2.32.1#53 Apr 02 12:22:07 managed-node1 dnsmasq[29028]: read /run/containers/cni/dnsname/quadlet-basic-name/addnhosts - 1 addresses Apr 02 12:22:07 managed-node1 kernel: Adding Red Hat flag eBPF/cgroup. Apr 02 12:22:07 managed-node1 systemd[1]: Started quadlet-basic-mysql.service. -- Subject: Unit quadlet-basic-mysql.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit quadlet-basic-mysql.service has finished starting up. -- -- The start-up result is done. 
Apr 02 12:22:07 managed-node1 quadlet-basic-mysql[28849]: b6b6d0d7dc865df4c1ee581fe8db037c8ffd13cca7c0a4431e0f17c10ef82bad Apr 02 12:22:08 managed-node1 platform-python[29220]: ansible-command Invoked with _raw_params=cat /etc/containers/systemd/quadlet-basic-mysql.container warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:22:08 managed-node1 platform-python[29344]: ansible-command Invoked with _raw_params=cat /etc/containers/systemd/quadlet-basic.network warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:22:09 managed-node1 platform-python[29479]: ansible-command Invoked with _raw_params=cat /etc/containers/systemd/quadlet-basic-mysql.volume warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:22:09 managed-node1 dnsmasq[29028]: listening on cni-podman1(#3): fe80::f01f:25ff:fe92:a7f8%cni-podman1 Apr 02 12:22:09 managed-node1 platform-python[29603]: ansible-command Invoked with _raw_params=podman exec quadlet-basic-mysql-name cat /tmp/test.json warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:22:12 managed-node1 platform-python[29923]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:22:13 managed-node1 platform-python[30080]: ansible-getent Invoked with database=passwd key=user_quadlet_basic fail_key=False service=None split=None Apr 02 12:22:13 managed-node1 platform-python[30204]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:22:14 managed-node1 platform-python[30338]: ansible-command Invoked with _raw_params=getsubids user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:22:14 managed-node1 platform-python[30462]: ansible-command Invoked with _raw_params=getsubids -g user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:22:17 managed-node1 systemd[1]: NetworkManager-dispatcher.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. 
Apr 02 12:22:17 managed-node1 platform-python[30616]: ansible-stat Invoked with path=/run/user/1111 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:22:19 managed-node1 platform-python[30739]: ansible-stat Invoked with path=/run/user/1111 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:22:20 managed-node1 platform-python[30862]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:22:20 managed-node1 platform-python[30987]: ansible-command Invoked with _raw_params=getsubids user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:22:21 managed-node1 platform-python[31111]: ansible-command Invoked with _raw_params=getsubids -g user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:22:22 managed-node1 platform-python[31235]: ansible-stat Invoked with path=/run/user/1111 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:22:23 managed-node1 platform-python[31358]: ansible-stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.container follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:22:23 managed-node1 platform-python[31481]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.container state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Apr 02 12:22:24 managed-node1 chronyd[587]: Selected source 10.2.32.38 Apr 02 12:22:25 managed-node1 platform-python[31604]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:22:25 managed-node1 platform-python[31729]: ansible-command Invoked with _raw_params=getsubids user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:22:26 managed-node1 platform-python[31853]: ansible-command Invoked with _raw_params=getsubids -g user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:22:27 managed-node1 platform-python[31977]: ansible-stat Invoked with path=/run/user/1111 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:22:27 managed-node1 platform-python[32100]: ansible-stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-volume.volume follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:22:28 
managed-node1 platform-python[32223]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-volume.volume state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Apr 02 12:22:30 managed-node1 platform-python[32346]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:22:30 managed-node1 platform-python[32471]: ansible-command Invoked with _raw_params=getsubids user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:22:31 managed-node1 platform-python[32595]: ansible-command Invoked with _raw_params=getsubids -g user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:22:32 managed-node1 platform-python[32719]: ansible-stat Invoked with path=/run/user/1111 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:22:32 managed-node1 platform-python[32842]: ansible-stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.volume follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:22:33 managed-node1 platform-python[32965]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-mysql.volume state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Apr 02 12:22:35 managed-node1 platform-python[33088]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:22:36 managed-node1 platform-python[33213]: ansible-command Invoked with _raw_params=getsubids user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:22:36 managed-node1 platform-python[33337]: ansible-command Invoked with _raw_params=getsubids -g user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:22:38 managed-node1 platform-python[33461]: ansible-stat Invoked with path=/run/user/1111 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:22:38 managed-node1 platform-python[33584]: ansible-stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-network.network follow=False 
get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:22:39 managed-node1 platform-python[33707]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic-unused-network.network state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Apr 02 12:22:40 managed-node1 platform-python[33830]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:22:41 managed-node1 platform-python[33955]: ansible-command Invoked with _raw_params=getsubids user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:22:41 managed-node1 platform-python[34079]: ansible-command Invoked with _raw_params=getsubids -g user_quadlet_basic warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:22:43 managed-node1 platform-python[34203]: ansible-stat Invoked with path=/run/user/1111 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:22:43 managed-node1 platform-python[34326]: ansible-stat Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic.network follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:22:44 managed-node1 platform-python[34449]: ansible-file Invoked with path=/home/user_quadlet_basic/.config/containers/systemd/quadlet-basic.network state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Apr 02 12:22:46 managed-node1 platform-python[34572]: ansible-stat Invoked with path=/run/user/1111 follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:22:47 managed-node1 platform-python[34695]: ansible-user Invoked with name=user_quadlet_basic uid=1111 state=absent non_unique=False force=False remove=False create_home=True system=False move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node1 update_password=always group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None Apr 02 12:22:47 managed-node1 userdel[34699]: delete user 'user_quadlet_basic' Apr 02 12:22:47 managed-node1 userdel[34699]: removed group 'user_quadlet_basic' owned by 'user_quadlet_basic' Apr 02 
12:22:47 managed-node1 userdel[34699]: removed shadow group 'user_quadlet_basic' owned by 'user_quadlet_basic' Apr 02 12:22:50 managed-node1 platform-python[34952]: ansible-command Invoked with _raw_params=podman --version warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:22:51 managed-node1 platform-python[35081]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Apr 02 12:22:52 managed-node1 platform-python[35205]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:22:59 managed-node1 platform-python[35590]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:23:00 managed-node1 platform-python[35715]: ansible-systemd Invoked with name=quadlet-basic-mysql.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None user=None Apr 02 12:23:00 managed-node1 systemd[1]: Reloading. Apr 02 12:23:00 managed-node1 systemd[1]: /etc/systemd/system/user-0.slice:4: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument Apr 02 12:23:00 managed-node1 systemd[1]: /etc/systemd/system/user-.slice.d/override.conf:2: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument Apr 02 12:23:00 managed-node1 systemd[1]: Stopping quadlet-basic-mysql.service... -- Subject: Unit quadlet-basic-mysql.service has begun shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit quadlet-basic-mysql.service has begun shutting down. Apr 02 12:23:02 managed-node1 kernel: cni-podman1: port 1(vethbd11679c) entered disabled state Apr 02 12:23:02 managed-node1 kernel: device vethbd11679c left promiscuous mode Apr 02 12:23:02 managed-node1 kernel: cni-podman1: port 1(vethbd11679c) entered disabled state Apr 02 12:23:02 managed-node1 systemd[1]: run-netns-netns\x2dcb656d15\x2d59d3\x2d3fd3\x2d490c\x2d94a39f43ab79.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit run-netns-netns\x2dcb656d15\x2d59d3\x2d3fd3\x2d490c\x2d94a39f43ab79.mount has successfully entered the 'dead' state. Apr 02 12:23:02 managed-node1 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-b6b6d0d7dc865df4c1ee581fe8db037c8ffd13cca7c0a4431e0f17c10ef82bad-userdata-shm.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay\x2dcontainers-b6b6d0d7dc865df4c1ee581fe8db037c8ffd13cca7c0a4431e0f17c10ef82bad-userdata-shm.mount has successfully entered the 'dead' state. Apr 02 12:23:02 managed-node1 systemd[1]: var-lib-containers-storage-overlay-4f000394e4c206eb21e6d9d7d10dc523b8c5d3fba3b9458aaa957ba1fab50883-merged.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay-4f000394e4c206eb21e6d9d7d10dc523b8c5d3fba3b9458aaa957ba1fab50883-merged.mount has successfully entered the 'dead' state. 
Apr 02 12:23:02 managed-node1 quadlet-basic-mysql[35751]: b6b6d0d7dc865df4c1ee581fe8db037c8ffd13cca7c0a4431e0f17c10ef82bad Apr 02 12:23:02 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Apr 02 12:23:02 managed-node1 systemd[1]: quadlet-basic-mysql.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit quadlet-basic-mysql.service has successfully entered the 'dead' state. Apr 02 12:23:02 managed-node1 systemd[1]: Stopped quadlet-basic-mysql.service. -- Subject: Unit quadlet-basic-mysql.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit quadlet-basic-mysql.service has finished shutting down. Apr 02 12:23:03 managed-node1 platform-python[36000]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-basic-mysql.container follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:23:03 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Apr 02 12:23:04 managed-node1 platform-python[36248]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-basic-mysql.container state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Apr 02 12:23:05 managed-node1 platform-python[36371]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None Apr 02 12:23:05 managed-node1 systemd[1]: Reloading. Apr 02 12:23:05 managed-node1 systemd[1]: /etc/systemd/system/user-0.slice:4: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument Apr 02 12:23:05 managed-node1 systemd[1]: /etc/systemd/system/user-.slice.d/override.conf:2: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument Apr 02 12:23:05 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Apr 02 12:23:06 managed-node1 platform-python[36655]: ansible-command Invoked with _raw_params=podman image prune --all -f warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:23:06 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
Apr 02 12:23:07 managed-node1 platform-python[36786]: ansible-command Invoked with _raw_params=podman images -n warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:23:07 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Apr 02 12:23:07 managed-node1 platform-python[36916]: ansible-command Invoked with _raw_params=podman volume ls -n warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:23:08 managed-node1 platform-python[37046]: ansible-command Invoked with _raw_params=podman ps --noheading warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:23:08 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Apr 02 12:23:08 managed-node1 platform-python[37176]: ansible-command Invoked with _raw_params=podman network ls -n -q warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:23:08 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Apr 02 12:23:09 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Apr 02 12:23:09 managed-node1 platform-python[37617]: ansible-service_facts Invoked Apr 02 12:23:12 managed-node1 platform-python[37830]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:23:13 managed-node1 platform-python[37955]: ansible-systemd Invoked with name=quadlet-basic-unused-volume-volume.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None user=None Apr 02 12:23:13 managed-node1 systemd[1]: Reloading. Apr 02 12:23:14 managed-node1 systemd[1]: /etc/systemd/system/user-0.slice:4: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument Apr 02 12:23:14 managed-node1 systemd[1]: /etc/systemd/system/user-.slice.d/override.conf:2: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument Apr 02 12:23:14 managed-node1 systemd[1]: quadlet-basic-unused-volume-volume.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit quadlet-basic-unused-volume-volume.service has successfully entered the 'dead' state. Apr 02 12:23:14 managed-node1 systemd[1]: Stopped quadlet-basic-unused-volume-volume.service. 
-- Subject: Unit quadlet-basic-unused-volume-volume.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit quadlet-basic-unused-volume-volume.service has finished shutting down. Apr 02 12:23:14 managed-node1 platform-python[38112]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-basic-unused-volume.volume follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:23:15 managed-node1 platform-python[38360]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-basic-unused-volume.volume state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Apr 02 12:23:16 managed-node1 platform-python[38483]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None Apr 02 12:23:16 managed-node1 systemd[1]: Reloading. Apr 02 12:23:16 managed-node1 systemd[1]: /etc/systemd/system/user-0.slice:4: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument Apr 02 12:23:16 managed-node1 systemd[1]: /etc/systemd/system/user-.slice.d/override.conf:2: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument Apr 02 12:23:16 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Apr 02 12:23:17 managed-node1 platform-python[38767]: ansible-command Invoked with _raw_params=podman image prune --all -f warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:23:17 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Apr 02 12:23:18 managed-node1 platform-python[38897]: ansible-command Invoked with _raw_params=podman images -n warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:23:18 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
Apr 02 12:23:18 managed-node1 platform-python[39028]: ansible-command Invoked with _raw_params=podman volume ls -n warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:23:18 managed-node1 platform-python[39158]: ansible-command Invoked with _raw_params=podman ps --noheading warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:23:19 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Apr 02 12:23:19 managed-node1 platform-python[39288]: ansible-command Invoked with _raw_params=podman network ls -n -q warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:23:19 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Apr 02 12:23:20 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Apr 02 12:23:20 managed-node1 platform-python[39728]: ansible-service_facts Invoked Apr 02 12:23:23 managed-node1 platform-python[39941]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:23:25 managed-node1 platform-python[40066]: ansible-systemd Invoked with name=quadlet-basic-mysql-volume.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None user=None Apr 02 12:23:25 managed-node1 systemd[1]: Reloading. Apr 02 12:23:25 managed-node1 systemd[1]: /etc/systemd/system/user-0.slice:4: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument Apr 02 12:23:25 managed-node1 systemd[1]: /etc/systemd/system/user-.slice.d/override.conf:2: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument Apr 02 12:23:25 managed-node1 systemd[1]: quadlet-basic-mysql-volume.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit quadlet-basic-mysql-volume.service has successfully entered the 'dead' state. Apr 02 12:23:25 managed-node1 systemd[1]: Stopped quadlet-basic-mysql-volume.service. -- Subject: Unit quadlet-basic-mysql-volume.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit quadlet-basic-mysql-volume.service has finished shutting down. 
Apr 02 12:23:25 managed-node1 platform-python[40223]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-basic-mysql.volume follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Apr 02 12:23:26 managed-node1 platform-python[40471]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-basic-mysql.volume state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None Apr 02 12:23:27 managed-node1 platform-python[40594]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None Apr 02 12:23:27 managed-node1 systemd[1]: Reloading. Apr 02 12:23:27 managed-node1 systemd[1]: /etc/systemd/system/user-0.slice:4: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument Apr 02 12:23:27 managed-node1 systemd[1]: /etc/systemd/system/user-.slice.d/override.conf:2: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument Apr 02 12:23:27 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Apr 02 12:23:28 managed-node1 platform-python[40877]: ansible-command Invoked with _raw_params=podman image prune --all -f warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:23:30 managed-node1 platform-python[41007]: ansible-command Invoked with _raw_params=podman images -n warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:23:30 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Apr 02 12:23:30 managed-node1 platform-python[41138]: ansible-command Invoked with _raw_params=podman volume ls -n warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:23:30 managed-node1 platform-python[41268]: ansible-command Invoked with _raw_params=podman ps --noheading warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Apr 02 12:23:30 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
Apr 02 12:23:31 managed-node1 platform-python[41399]: ansible-command Invoked with _raw_params=podman network ls -n -q warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Apr 02 12:23:31 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Apr 02 12:23:32 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Apr 02 12:23:32 managed-node1 platform-python[41839]: ansible-service_facts Invoked
Apr 02 12:23:34 managed-node1 platform-python[42052]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Apr 02 12:23:36 managed-node1 platform-python[42177]: ansible-systemd Invoked with name=quadlet-basic-unused-network-network.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None user=None
Apr 02 12:23:36 managed-node1 systemd[1]: Reloading.
Apr 02 12:23:36 managed-node1 systemd[1]: /etc/systemd/system/user-0.slice:4: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Apr 02 12:23:36 managed-node1 systemd[1]: /etc/systemd/system/user-.slice.d/override.conf:2: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Apr 02 12:23:36 managed-node1 systemd[1]: quadlet-basic-unused-network-network.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit quadlet-basic-unused-network-network.service has successfully entered the 'dead' state.
Apr 02 12:23:36 managed-node1 systemd[1]: Stopped quadlet-basic-unused-network-network.service. -- Subject: Unit quadlet-basic-unused-network-network.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit quadlet-basic-unused-network-network.service has finished shutting down.
Apr 02 12:23:37 managed-node1 platform-python[42334]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-basic-unused-network.network follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Apr 02 12:23:38 managed-node1 platform-python[42582]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-basic-unused-network.network state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Apr 02 12:23:38 managed-node1 platform-python[42705]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None
Apr 02 12:23:38 managed-node1 systemd[1]: Reloading.
Apr 02 12:23:39 managed-node1 systemd[1]: /etc/systemd/system/user-0.slice:4: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Apr 02 12:23:39 managed-node1 systemd[1]: /etc/systemd/system/user-.slice.d/override.conf:2: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Apr 02 12:23:39 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Apr 02 12:23:40 managed-node1 platform-python[43038]: ansible-command Invoked with _raw_params=podman image prune --all -f warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Apr 02 12:23:40 managed-node1 platform-python[43168]: ansible-command Invoked with _raw_params=podman images -n warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Apr 02 12:23:41 managed-node1 platform-python[43298]: ansible-command Invoked with _raw_params=podman volume ls -n warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Apr 02 12:23:41 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Apr 02 12:23:41 managed-node1 platform-python[43429]: ansible-command Invoked with _raw_params=podman ps --noheading warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Apr 02 12:23:41 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Apr 02 12:23:42 managed-node1 platform-python[43559]: ansible-command Invoked with _raw_params=podman network ls -n -q warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Apr 02 12:23:42 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Apr 02 12:23:44 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Apr 02 12:23:44 managed-node1 platform-python[43975]: ansible-service_facts Invoked
Apr 02 12:23:47 managed-node1 platform-python[44188]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Apr 02 12:23:49 managed-node1 platform-python[44313]: ansible-systemd Invoked with name=quadlet-basic-network.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None user=None
Apr 02 12:23:49 managed-node1 systemd[1]: Reloading.
Apr 02 12:23:49 managed-node1 systemd[1]: /etc/systemd/system/user-0.slice:4: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Apr 02 12:23:49 managed-node1 systemd[1]: /etc/systemd/system/user-.slice.d/override.conf:2: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Apr 02 12:23:49 managed-node1 systemd[1]: quadlet-basic-network.service: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit quadlet-basic-network.service has successfully entered the 'dead' state.
Apr 02 12:23:49 managed-node1 systemd[1]: Stopped quadlet-basic-network.service. -- Subject: Unit quadlet-basic-network.service has finished shutting down -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit quadlet-basic-network.service has finished shutting down.
Apr 02 12:23:49 managed-node1 platform-python[44470]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-basic.network follow=False get_md5=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Apr 02 12:23:50 managed-node1 platform-python[44718]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-basic.network state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None content=NOT_LOGGING_PARAMETER backup=None remote_src=None regexp=None delimiter=None directory_mode=None
Apr 02 12:23:51 managed-node1 platform-python[44841]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None user=None
Apr 02 12:23:51 managed-node1 systemd[1]: Reloading.
Apr 02 12:23:51 managed-node1 systemd[1]: /etc/systemd/system/user-0.slice:4: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Apr 02 12:23:51 managed-node1 systemd[1]: /etc/systemd/system/user-.slice.d/override.conf:2: Failed to assign slice user.slice to unit user-0.slice, ignoring: Invalid argument
Apr 02 12:23:52 managed-node1 NetworkManager[629]: [1743611032.0016] device (cni-podman1): state change: activated -> unmanaged (reason 'unmanaged', sys-iface-state: 'removed')
Apr 02 12:23:52 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Apr 02 12:23:52 managed-node1 dbus-daemon[584]: [system] Activating via systemd: service name='org.freedesktop.nm_dispatcher' unit='dbus-org.freedesktop.nm-dispatcher.service' requested by ':1.2' (uid=0 pid=629 comm="/usr/sbin/NetworkManager --no-daemon " label="system_u:system_r:NetworkManager_t:s0")
Apr 02 12:23:52 managed-node1 systemd[1]: Starting Network Manager Script Dispatcher Service... -- Subject: Unit NetworkManager-dispatcher.service has begun start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit NetworkManager-dispatcher.service has begun starting up.
Apr 02 12:23:52 managed-node1 dbus-daemon[584]: [system] Successfully activated service 'org.freedesktop.nm_dispatcher'
Apr 02 12:23:52 managed-node1 systemd[1]: Started Network Manager Script Dispatcher Service. -- Subject: Unit NetworkManager-dispatcher.service has finished start-up -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- Unit NetworkManager-dispatcher.service has finished starting up. -- -- The start-up result is done.
Apr 02 12:23:52 managed-node1 platform-python[45160]: ansible-command Invoked with _raw_params=podman image prune --all -f warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Apr 02 12:23:52 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Apr 02 12:23:53 managed-node1 platform-python[45290]: ansible-command Invoked with _raw_params=podman images -n warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Apr 02 12:23:53 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Apr 02 12:23:53 managed-node1 platform-python[45421]: ansible-command Invoked with _raw_params=podman volume ls -n warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Apr 02 12:23:54 managed-node1 platform-python[45551]: ansible-command Invoked with _raw_params=podman ps --noheading warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Apr 02 12:23:54 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Apr 02 12:23:54 managed-node1 platform-python[45682]: ansible-command Invoked with _raw_params=podman network ls -n -q warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Apr 02 12:23:55 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Succeeded. -- Subject: Unit succeeded -- Defined-By: systemd -- Support: https://access.redhat.com/support -- -- The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Apr 02 12:23:56 managed-node1 platform-python[46073]: ansible-service_facts Invoked
Apr 02 12:23:58 managed-node1 platform-python[46285]: ansible-command Invoked with _raw_params=journalctl -ex warn=True _uses_shell=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None

PLAY RECAP *********************************************************************
managed-node1              : ok=964  changed=81  unreachable=0  failed=1  skipped=1080  rescued=1  ignored=0

Wednesday 02 April 2025  12:23:58 -0400 (0:00:00.571)       0:06:12.654 *******
===============================================================================
Reboot ----------------------------------------------------------------- 40.31s
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:188
fedora.linux_system_roles.podman : Ensure container images are present --- 8.66s
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
fedora.linux_system_roles.podman : Ensure container images are present --- 7.10s
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
fedora.linux_system_roles.podman : Stop and disable service ------------- 2.82s
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
fedora.linux_system_roles.podman : Stop and disable service ------------- 2.71s
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
fedora.linux_system_roles.podman : Gather the package facts ------------- 2.20s
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
fedora.linux_system_roles.podman : For testing and debugging - services --- 1.98s
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : Gather the package facts ------------- 1.78s
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
fedora.linux_system_roles.podman : Gather the package facts ------------- 1.70s
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
fedora.linux_system_roles.podman : For testing and debugging - services --- 1.69s
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 1.68s
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 1.66s
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 1.65s
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 1.65s
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 1.65s
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : Gather the package facts ------------- 1.64s
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
fedora.linux_system_roles.podman : For testing and debugging - services --- 1.63s
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 1.62s
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 1.60s
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : Gather the package facts ------------- 1.58s
/tmp/collections-TGW/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
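Note for readers: the journal excerpt above records the quadlet cleanup sequence as a series of Ansible module invocations (stop and disable the generated service, remove the unit file from /etc/containers/systemd, reload systemd, prune images). The following is a minimal sketch of that sequence as plain Ansible tasks, reconstructed only from the parameters visible in the journal entries; it is illustrative and is not the role's actual cleanup_quadlet_spec.yml task file.

- name: Stop and disable the generated quadlet service (sketch, name taken from the journal)
  systemd:
    name: quadlet-basic-network.service
    scope: system
    state: stopped
    enabled: false
    force: true

- name: Remove the quadlet unit file (path taken from the journal)
  file:
    path: /etc/containers/systemd/quadlet-basic.network
    state: absent

- name: Reload systemd so the generated service unit disappears
  systemd:
    daemon_reload: true
    scope: system

- name: Prune unused container images, as the log's "podman image prune --all -f" step does
  command: podman image prune --all -f
  changed_when: true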