ansible-playbook [core 2.17.12]
  config file = None
  configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /usr/local/lib/python3.12/site-packages/ansible
  ansible collection location = /tmp/collections-hA1
  executable location = /usr/local/bin/ansible-playbook
  python version = 3.12.11 (main, Jun 4 2025, 00:00:00) [GCC 14.2.1 20250110 (Red Hat 14.2.1-8)] (/usr/bin/python3.12)
  jinja version = 3.1.6
  libyaml = True
No config file found; using defaults
running playbook inside collection fedora.linux_system_roles
Skipping callback 'debug', as we already have a stdout callback.
Skipping callback 'json', as we already have a stdout callback.
Skipping callback 'jsonl', as we already have a stdout callback.
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.

PLAYBOOK: tests_quadlet_basic.yml **********************************************
2 plays in /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml

PLAY [all] *********************************************************************

TASK [Include vault variables] *************************************************
task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:5
Monday 07 July 2025 20:33:39 -0400 (0:00:00.018) 0:00:00.018 ***********
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_test_password": {
            "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n35383939616163653333633431363463313831383037386236646138333162396161356130303461\n3932623930643263313563336163316337643562333936360a363538636631313039343233383732\n38666530383538656639363465313230343533386130303833336434303438333161656262346562\n3362626538613031640a663330613638366132356534363534353239616666653466353961323533\n6565\n"
        },
        "mysql_container_root_password": {
            "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n61333932373230333539663035366431326163363166363036323963623131363530326231303634\n6635326161643165363366323062333334363730376631660a393566366139353861656364656661\n38653463363837336639363032646433666361646535366137303464623261313663643336306465\n6264663730656337310a343962353137386238383064646533366433333437303566656433386233\n34343235326665646661623131643335313236313131353661386338343366316261643634653633\n3832313034366536616531323963333234326461353130303532\n"
        }
    },
    "ansible_included_var_files": [
        "/tmp/podman-z9U/tests/vars/vault-variables.yml"
    ],
    "changed": false
}

PLAY [Ensure that the role can manage quadlet specs] ***************************

TASK [Gathering Facts] *********************************************************
task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:9
Monday 07 July 2025 20:33:39 -0400 (0:00:00.053) 0:00:00.072 ***********
[WARNING]: Platform linux on host managed-node1 is using the discovered Python
interpreter at /usr/bin/python3.12, but future installation of another Python
interpreter could change the meaning of that path. See
https://docs.ansible.com/ansible-core/2.17/reference_appendices/interpreter_discovery.html
for more information.
ok: [managed-node1]

TASK [Test is only supported on x86_64] ****************************************
task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:55
Monday 07 July 2025 20:33:40 -0400 (0:00:01.138) 0:00:01.210 ***********
skipping: [managed-node1] => {
    "false_condition": "ansible_facts[\"architecture\"] != \"x86_64\""
}

TASK [End test] ****************************************************************
task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:62
Monday 07 July 2025 20:33:40 -0400 (0:00:00.014) 0:00:01.225 ***********
META: end_play conditional evaluated to False, continuing play
skipping: [managed-node1] => {
    "skip_reason": "end_play conditional evaluated to False, continuing play"
}

MSG:

end_play

TASK [Run role - do not pull images] *******************************************
task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:70
Monday 07 July 2025 20:33:40 -0400 (0:00:00.007) 0:00:01.232 ***********
included: fedora.linux_system_roles.podman for managed-node1

TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3
Monday 07 July 2025 20:33:40 -0400 (0:00:00.047) 0:00:01.280 ***********
included: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] ****
task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3
Monday 07 July 2025 20:33:40 -0400 (0:00:00.022) 0:00:01.302 ***********
skipping: [managed-node1] => {
    "changed": false,
    "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************
task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11
Monday 07 July 2025 20:33:41 -0400 (0:00:00.033) 0:00:01.336 ***********
ok: [managed-node1] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] ***
task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16
Monday 07 July 2025 20:33:41 -0400 (0:00:00.428) 0:00:01.764 ***********
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_is_ostree": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] ***
task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23
Monday 07 July 2025 20:33:41 -0400 (0:00:00.351) 0:00:02.138 ***********
ok: [managed-node1] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] ***
task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28
Monday 07 July 2025 20:33:41 -0400 (0:00:00.021) 0:00:02.159 ***********
ok: [managed-node1] => {
    "ansible_facts": {
        "__podman_is_transactional": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Monday 07 July 2025 20:33:41 -0400 (0:00:00.021) 0:00:02.159 *********** ok: [managed-node1] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node1] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node1] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node1] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Monday 07 July 2025 20:33:41 -0400 (0:00:00.043) 0:00:02.203 *********** ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Monday 07 July 2025 20:33:43 -0400 (0:00:01.117) 0:00:03.320 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Monday 07 July 2025 20:33:43 -0400 (0:00:00.046) 0:00:03.366 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages)) | list | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Monday 07 July 2025 20:33:43 -0400 (0:00:00.050) 0:00:03.417 *********** skipping: [managed-node1] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Monday 07 July 2025 20:33:43 -0400 (0:00:00.045) 0:00:03.462 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if 
reboot is needed and not set] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Monday 07 July 2025 20:33:43 -0400 (0:00:00.044) 0:00:03.507 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Monday 07 July 2025 20:33:43 -0400 (0:00:00.044) 0:00:03.551 *********** ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.023681", "end": "2025-07-07 20:33:43.656349", "rc": 0, "start": "2025-07-07 20:33:43.632668" } STDOUT: podman version 5.5.1 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Monday 07 July 2025 20:33:43 -0400 (0:00:00.482) 0:00:04.033 *********** ok: [managed-node1] => { "ansible_facts": { "podman_version": "5.5.1" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Monday 07 July 2025 20:33:43 -0400 (0:00:00.031) 0:00:04.065 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Monday 07 July 2025 20:33:43 -0400 (0:00:00.029) 0:00:04.095 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Monday 07 July 2025 20:33:43 -0400 (0:00:00.050) 0:00:04.146 *********** META: end_host conditional evaluated to False, continuing execution for managed-node1 skipping: [managed-node1] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node1" } MSG: end_host conditional evaluated to false, continuing execution for managed-node1 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Monday 07 July 2025 20:33:43 -0400 (0:00:00.052) 0:00:04.198 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Monday 07 July 2025 20:33:43 -0400 (0:00:00.064) 0:00:04.263 *********** META: end_host conditional evaluated to 
False, continuing execution for managed-node1 skipping: [managed-node1] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node1" } MSG: end_host conditional evaluated to false, continuing execution for managed-node1 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Monday 07 July 2025 20:33:43 -0400 (0:00:00.043) 0:00:04.307 *********** included: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Monday 07 July 2025 20:33:44 -0400 (0:00:00.060) 0:00:04.367 *********** ok: [managed-node1] => { "ansible_facts": { "getent_passwd": { "root": [ "x", "0", "0", "Super User", "/root", "/bin/bash" ] } }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Monday 07 July 2025 20:33:44 -0400 (0:00:00.455) 0:00:04.822 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Monday 07 July 2025 20:33:44 -0400 (0:00:00.033) 0:00:04.855 *********** ok: [managed-node1] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Monday 07 July 2025 20:33:44 -0400 (0:00:00.037) 0:00:04.893 *********** ok: [managed-node1] => { "changed": false, "stat": { "atime": 1751934569.8134274, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "fa9845e044ad8d1bfcc68a2c8e62c8d83a1bb20e", "ctime": 1751934562.4243717, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 8668983, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1748217600.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15560, "uid": 0, "version": "1546733485", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Monday 07 July 2025 20:33:44 -0400 (0:00:00.371) 0:00:05.264 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for 
user subgids] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Monday 07 July 2025 20:33:44 -0400 (0:00:00.030) 0:00:05.295 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Monday 07 July 2025 20:33:45 -0400 (0:00:00.030) 0:00:05.326 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Monday 07 July 2025 20:33:45 -0400 (0:00:00.031) 0:00:05.357 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Monday 07 July 2025 20:33:45 -0400 (0:00:00.029) 0:00:05.386 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Monday 07 July 2025 20:33:45 -0400 (0:00:00.030) 0:00:05.417 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Monday 07 July 2025 20:33:45 -0400 (0:00:00.029) 0:00:05.446 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Monday 07 July 2025 20:33:45 -0400 (0:00:00.030) 0:00:05.476 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Monday 07 July 2025 20:33:45 -0400 (0:00:00.030) 0:00:05.507 *********** ok: [managed-node1] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_parent_mode": "0755", "__podman_parent_path": "/etc/containers", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": 
"/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:126 Monday 07 July 2025 20:33:45 -0400 (0:00:00.059) 0:00:05.567 *********** included: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Monday 07 July 2025 20:33:45 -0400 (0:00:00.085) 0:00:05.652 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Monday 07 July 2025 20:33:45 -0400 (0:00:00.031) 0:00:05.683 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:129 Monday 07 July 2025 20:33:45 -0400 (0:00:00.044) 0:00:05.727 *********** included: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Monday 07 July 2025 20:33:45 -0400 (0:00:00.058) 0:00:05.786 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Monday 07 July 2025 20:33:45 -0400 (0:00:00.029) 0:00:05.815 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:132 Monday 07 July 2025 20:33:45 -0400 (0:00:00.030) 0:00:05.846 *********** included: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:7 Monday 07 July 2025 20:33:45 -0400 (0:00:00.057) 0:00:05.904 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:15 Monday 07 July 2025 20:33:45 -0400 (0:00:00.030) 0:00:05.934 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:135 Monday 07 July 2025 20:33:45 -0400 (0:00:00.028) 0:00:05.963 *********** included: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:8 Monday 07 July 2025 20:33:45 -0400 (0:00:00.060) 0:00:06.024 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:16 Monday 07 July 2025 20:33:45 -0400 (0:00:00.029) 0:00:06.053 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:21 Monday 07 July 2025 20:33:45 -0400 (0:00:00.029) 0:00:06.082 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:27 Monday 07 July 2025 20:33:45 -0400 (0:00:00.029) 0:00:06.111 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:141 Monday 07 July 2025 20:33:45 -0400 (0:00:00.030) 0:00:06.141 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_firewall | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:148 Monday 07 July 2025 20:33:45 -0400 (0:00:00.028) 0:00:06.170 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: 
/tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:155 Monday 07 July 2025 20:33:45 -0400 (0:00:00.057) 0:00:06.228 *********** ok: [managed-node1] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:159 Monday 07 July 2025 20:33:45 -0400 (0:00:00.030) 0:00:06.258 *********** skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:168 Monday 07 July 2025 20:33:45 -0400 (0:00:00.025) 0:00:06.284 *********** skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:177 Monday 07 July 2025 20:33:45 -0400 (0:00:00.026) 0:00:06.311 *********** skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:184 Monday 07 July 2025 20:33:46 -0400 (0:00:00.025) 0:00:06.337 *********** skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:191 Monday 07 July 2025 20:33:46 -0400 (0:00:00.027) 0:00:06.364 *********** included: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Monday 07 July 2025 20:33:46 -0400 (0:00:00.072) 0:00:06.437 *********** ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Container": { "ContainerName": "nopull", "Image": "quay.io/libpod/testimage:20210610" }, "Install": { "WantedBy": "default.target" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Monday 07 July 2025 20:33:46 -0400 (0:00:00.050) 0:00:06.488 *********** ok: [managed-node1] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": false, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if 
no quadlet spec is given] ***** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Monday 07 July 2025 20:33:46 -0400 (0:00:00.060) 0:00:06.549 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Monday 07 July 2025 20:33:46 -0400 (0:00:00.037) 0:00:06.586 *********** ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_name": "nopull", "__podman_quadlet_type": "container", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Monday 07 July 2025 20:33:46 -0400 (0:00:00.054) 0:00:06.640 *********** included: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Monday 07 July 2025 20:33:46 -0400 (0:00:00.066) 0:00:06.707 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Monday 07 July 2025 20:33:46 -0400 (0:00:00.041) 0:00:06.748 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Monday 07 July 2025 20:33:46 -0400 (0:00:00.041) 0:00:06.790 *********** ok: [managed-node1] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Monday 07 July 2025 20:33:46 -0400 (0:00:00.041) 0:00:06.831 *********** ok: [managed-node1] => { "changed": false, "stat": { "atime": 1751934569.8134274, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "fa9845e044ad8d1bfcc68a2c8e62c8d83a1bb20e", "ctime": 1751934562.4243717, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 8668983, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1748217600.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15560, 
"uid": 0, "version": "1546733485", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Monday 07 July 2025 20:33:46 -0400 (0:00:00.382) 0:00:07.214 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Monday 07 July 2025 20:33:46 -0400 (0:00:00.063) 0:00:07.278 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Monday 07 July 2025 20:33:46 -0400 (0:00:00.031) 0:00:07.309 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Monday 07 July 2025 20:33:47 -0400 (0:00:00.031) 0:00:07.341 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Monday 07 July 2025 20:33:47 -0400 (0:00:00.030) 0:00:07.372 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Monday 07 July 2025 20:33:47 -0400 (0:00:00.031) 0:00:07.404 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Monday 07 July 2025 20:33:47 -0400 (0:00:00.031) 0:00:07.436 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Monday 07 July 2025 20:33:47 -0400 (0:00:00.035) 0:00:07.471 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not 
__podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Monday 07 July 2025 20:33:47 -0400 (0:00:00.040) 0:00:07.511 *********** ok: [managed-node1] => { "ansible_facts": { "__podman_activate_systemd_unit": false, "__podman_images_found": [ "quay.io/libpod/testimage:20210610" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "nopull.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Monday 07 July 2025 20:33:47 -0400 (0:00:00.075) 0:00:07.587 *********** ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Monday 07 July 2025 20:33:47 -0400 (0:00:00.038) 0:00:07.625 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:88 Monday 07 July 2025 20:33:47 -0400 (0:00:00.039) 0:00:07.664 *********** ok: [managed-node1] => { "ansible_facts": { "__podman_images": [ "quay.io/libpod/testimage:20210610" ], "__podman_quadlet_file": "/etc/containers/systemd/nopull.container", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:106 Monday 07 July 2025 20:33:47 -0400 (0:00:00.080) 0:00:07.744 *********** ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:113 Monday 07 July 2025 20:33:47 -0400 (0:00:00.047) 0:00:07.792 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:117 Monday 07 July 2025 20:33:47 -0400 (0:00:00.032) 0:00:07.825 *********** included: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Monday 07 
July 2025 20:33:47 -0400 (0:00:00.067) 0:00:07.893 *********** included: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Monday 07 July 2025 20:33:47 -0400 (0:00:00.069) 0:00:07.963 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Monday 07 July 2025 20:33:47 -0400 (0:00:00.047) 0:00:08.010 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Monday 07 July 2025 20:33:47 -0400 (0:00:00.088) 0:00:08.099 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Monday 07 July 2025 20:33:47 -0400 (0:00:00.045) 0:00:08.145 *********** skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Monday 07 July 2025 20:33:47 -0400 (0:00:00.041) 0:00:08.186 *********** skipping: [managed-node1] => (item=None) => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Monday 07 July 2025 20:33:47 -0400 (0:00:00.061) 0:00:08.247 *********** ok: [managed-node1] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 43, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:50 Monday 07 July 2025 20:33:48 -0400 (0:00:00.592) 0:00:08.839 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_quadlet_file_src | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: 
/tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:62 Monday 07 July 2025 20:33:48 -0400 (0:00:00.038) 0:00:08.877 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_quadlet_str | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:75 Monday 07 July 2025 20:33:48 -0400 (0:00:00.040) 0:00:08.918 *********** changed: [managed-node1] => { "changed": true, "checksum": "670d64fc68a9768edb20cad26df2acc703542d85", "dest": "/etc/containers/systemd/nopull.container", "gid": 0, "group": "root", "md5sum": "cedb6667f6cd1b033fe06e2810fe6b19", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 151, "src": "/root/.ansible/tmp/ansible-tmp-1751934828.656423-17050-224685263014320/.source.container", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:87 Monday 07 July 2025 20:33:49 -0400 (0:00:00.836) 0:00:09.755 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_activate_systemd_unit | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:115 Monday 07 July 2025 20:33:49 -0400 (0:00:00.057) 0:00:09.813 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_activate_systemd_unit | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:131 Monday 07 July 2025 20:33:49 -0400 (0:00:00.090) 0:00:09.903 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_activate_systemd_unit | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:198 Monday 07 July 2025 20:33:49 -0400 (0:00:00.062) 0:00:09.965 *********** skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:204 Monday 07 July 2025 20:33:49 -0400 (0:00:00.045) 0:00:10.010 *********** skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:213 Monday 07 July 2025 20:33:49 -0400 (0:00:00.045) 0:00:10.056 *********** skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' 
was specified for this result", "changed": false } TASK [Verify image not pulled] ************************************************* task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:86 Monday 07 July 2025 20:33:49 -0400 (0:00:00.074) 0:00:10.131 *********** ok: [managed-node1] => { "changed": false } MSG: All assertions passed TASK [Run role - try to pull bogus image] ************************************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:90 Monday 07 July 2025 20:33:49 -0400 (0:00:00.055) 0:00:10.186 *********** included: fedora.linux_system_roles.podman for managed-node1 TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Monday 07 July 2025 20:33:50 -0400 (0:00:00.160) 0:00:10.347 *********** included: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Monday 07 July 2025 20:33:50 -0400 (0:00:00.127) 0:00:10.474 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Monday 07 July 2025 20:33:50 -0400 (0:00:00.058) 0:00:10.533 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Monday 07 July 2025 20:33:50 -0400 (0:00:00.049) 0:00:10.582 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Monday 07 July 2025 20:33:50 -0400 (0:00:00.052) 0:00:10.635 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Monday 07 July 2025 20:33:50 -0400 (0:00:00.047) 0:00:10.682 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Monday 07 July 2025 20:33:50 -0400 (0:00:00.052) 
0:00:10.734 *********** ok: [managed-node1] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node1] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node1] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node1] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Monday 07 July 2025 20:33:50 -0400 (0:00:00.087) 0:00:10.822 *********** ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Monday 07 July 2025 20:33:51 -0400 (0:00:00.944) 0:00:11.767 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Monday 07 July 2025 20:33:51 -0400 (0:00:00.029) 0:00:11.797 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages)) | list | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Monday 07 July 2025 20:33:51 -0400 (0:00:00.034) 0:00:11.831 *********** skipping: [managed-node1] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Monday 07 July 2025 20:33:51 -0400 (0:00:00.027) 0:00:11.859 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Monday 07 July 2025 
20:33:51 -0400 (0:00:00.028) 0:00:11.887 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Monday 07 July 2025 20:33:51 -0400 (0:00:00.027) 0:00:11.915 *********** ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.022718", "end": "2025-07-07 20:33:51.923360", "rc": 0, "start": "2025-07-07 20:33:51.900642" } STDOUT: podman version 5.5.1 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Monday 07 July 2025 20:33:51 -0400 (0:00:00.387) 0:00:12.302 *********** ok: [managed-node1] => { "ansible_facts": { "podman_version": "5.5.1" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Monday 07 July 2025 20:33:52 -0400 (0:00:00.033) 0:00:12.335 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Monday 07 July 2025 20:33:52 -0400 (0:00:00.059) 0:00:12.394 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Monday 07 July 2025 20:33:52 -0400 (0:00:00.031) 0:00:12.426 *********** META: end_host conditional evaluated to False, continuing execution for managed-node1 skipping: [managed-node1] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node1" } MSG: end_host conditional evaluated to false, continuing execution for managed-node1 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Monday 07 July 2025 20:33:52 -0400 (0:00:00.034) 0:00:12.461 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Monday 07 July 2025 20:33:52 -0400 (0:00:00.037) 0:00:12.498 *********** META: end_host conditional evaluated to False, continuing execution for managed-node1 skipping: [managed-node1] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for 
managed-node1" } MSG: end_host conditional evaluated to false, continuing execution for managed-node1 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Monday 07 July 2025 20:33:52 -0400 (0:00:00.042) 0:00:12.541 *********** included: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Monday 07 July 2025 20:33:52 -0400 (0:00:00.056) 0:00:12.597 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Monday 07 July 2025 20:33:52 -0400 (0:00:00.031) 0:00:12.629 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Monday 07 July 2025 20:33:52 -0400 (0:00:00.033) 0:00:12.662 *********** ok: [managed-node1] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Monday 07 July 2025 20:33:52 -0400 (0:00:00.039) 0:00:12.701 *********** ok: [managed-node1] => { "changed": false, "stat": { "atime": 1751934569.8134274, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "fa9845e044ad8d1bfcc68a2c8e62c8d83a1bb20e", "ctime": 1751934562.4243717, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 8668983, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1748217600.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15560, "uid": 0, "version": "1546733485", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Monday 07 July 2025 20:33:52 -0400 (0:00:00.370) 0:00:13.072 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: 
/tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Monday 07 July 2025 20:33:52 -0400 (0:00:00.032) 0:00:13.104 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Monday 07 July 2025 20:33:52 -0400 (0:00:00.029) 0:00:13.134 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Monday 07 July 2025 20:33:52 -0400 (0:00:00.031) 0:00:13.165 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Monday 07 July 2025 20:33:52 -0400 (0:00:00.030) 0:00:13.195 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Monday 07 July 2025 20:33:52 -0400 (0:00:00.030) 0:00:13.226 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Monday 07 July 2025 20:33:52 -0400 (0:00:00.063) 0:00:13.289 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Monday 07 July 2025 20:33:53 -0400 (0:00:00.031) 0:00:13.321 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Monday 07 July 2025 20:33:53 -0400 (0:00:00.032) 0:00:13.353 *********** ok: [managed-node1] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_parent_mode": "0755", "__podman_parent_path": "/etc/containers", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": 
"/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:126 Monday 07 July 2025 20:33:53 -0400 (0:00:00.040) 0:00:13.393 *********** included: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Monday 07 July 2025 20:33:53 -0400 (0:00:00.057) 0:00:13.450 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Monday 07 July 2025 20:33:53 -0400 (0:00:00.028) 0:00:13.479 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:129 Monday 07 July 2025 20:33:53 -0400 (0:00:00.029) 0:00:13.509 *********** included: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Monday 07 July 2025 20:33:53 -0400 (0:00:00.054) 0:00:13.563 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Monday 07 July 2025 20:33:53 -0400 (0:00:00.029) 0:00:13.593 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:132 Monday 07 July 2025 20:33:53 -0400 (0:00:00.028) 0:00:13.621 *********** included: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:7 Monday 07 July 2025 20:33:53 -0400 (0:00:00.057) 0:00:13.678 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:15 Monday 07 July 2025 20:33:53 -0400 (0:00:00.029) 0:00:13.707 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:135 Monday 07 July 2025 20:33:53 -0400 (0:00:00.028) 0:00:13.736 *********** included: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:8 Monday 07 July 2025 20:33:53 -0400 (0:00:00.058) 0:00:13.794 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:16 Monday 07 July 2025 20:33:53 -0400 (0:00:00.027) 0:00:13.822 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:21 Monday 07 July 2025 20:33:53 -0400 (0:00:00.058) 0:00:13.880 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:27 Monday 07 July 2025 20:33:53 -0400 (0:00:00.028) 0:00:13.909 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:141 Monday 07 July 2025 20:33:53 -0400 (0:00:00.029) 0:00:13.938 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_firewall | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:148 Monday 07 July 2025 20:33:53 -0400 (0:00:00.028) 0:00:13.967 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: 
/tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:155 Monday 07 July 2025 20:33:53 -0400 (0:00:00.042) 0:00:14.010 *********** ok: [managed-node1] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:159 Monday 07 July 2025 20:33:53 -0400 (0:00:00.030) 0:00:14.040 *********** skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:168 Monday 07 July 2025 20:33:53 -0400 (0:00:00.025) 0:00:14.066 *********** skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:177 Monday 07 July 2025 20:33:53 -0400 (0:00:00.025) 0:00:14.092 *********** skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:184 Monday 07 July 2025 20:33:53 -0400 (0:00:00.026) 0:00:14.118 *********** skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:191 Monday 07 July 2025 20:33:53 -0400 (0:00:00.025) 0:00:14.144 *********** included: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Monday 07 July 2025 20:33:53 -0400 (0:00:00.070) 0:00:14.214 *********** ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Container": { "ContainerName": "bogus", "Image": "this_is_a_bogus_image" }, "Install": { "WantedBy": "default.target" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Monday 07 July 2025 20:33:53 -0400 (0:00:00.037) 0:00:14.252 *********** ok: [managed-node1] => { "ansible_facts": { "__podman_continue_if_pull_fails": true, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet 
spec is given] ***** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Monday 07 July 2025 20:33:53 -0400 (0:00:00.035) 0:00:14.288 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Monday 07 July 2025 20:33:54 -0400 (0:00:00.027) 0:00:14.315 *********** ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_name": "bogus", "__podman_quadlet_type": "container", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Monday 07 July 2025 20:33:54 -0400 (0:00:00.043) 0:00:14.358 *********** included: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Monday 07 July 2025 20:33:54 -0400 (0:00:00.054) 0:00:14.413 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Monday 07 July 2025 20:33:54 -0400 (0:00:00.065) 0:00:14.478 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Monday 07 July 2025 20:33:54 -0400 (0:00:00.032) 0:00:14.510 *********** ok: [managed-node1] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Monday 07 July 2025 20:33:54 -0400 (0:00:00.040) 0:00:14.551 *********** ok: [managed-node1] => { "changed": false, "stat": { "atime": 1751934569.8134274, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "fa9845e044ad8d1bfcc68a2c8e62c8d83a1bb20e", "ctime": 1751934562.4243717, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 8668983, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1748217600.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15560, "uid": 0, 
"version": "1546733485", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Monday 07 July 2025 20:33:54 -0400 (0:00:00.369) 0:00:14.920 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Monday 07 July 2025 20:33:54 -0400 (0:00:00.031) 0:00:14.952 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Monday 07 July 2025 20:33:54 -0400 (0:00:00.030) 0:00:14.982 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Monday 07 July 2025 20:33:54 -0400 (0:00:00.034) 0:00:15.017 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Monday 07 July 2025 20:33:54 -0400 (0:00:00.038) 0:00:15.056 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Monday 07 July 2025 20:33:54 -0400 (0:00:00.030) 0:00:15.086 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Monday 07 July 2025 20:33:54 -0400 (0:00:00.031) 0:00:15.117 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Monday 07 July 2025 20:33:54 -0400 (0:00:00.029) 0:00:15.146 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not 
__podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Monday 07 July 2025 20:33:54 -0400 (0:00:00.030) 0:00:15.177 *********** ok: [managed-node1] => { "ansible_facts": { "__podman_activate_systemd_unit": false, "__podman_images_found": [ "this_is_a_bogus_image" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "bogus.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Monday 07 July 2025 20:33:54 -0400 (0:00:00.048) 0:00:15.225 *********** ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Monday 07 July 2025 20:33:54 -0400 (0:00:00.031) 0:00:15.257 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:88 Monday 07 July 2025 20:33:54 -0400 (0:00:00.032) 0:00:15.289 *********** ok: [managed-node1] => { "ansible_facts": { "__podman_images": [ "this_is_a_bogus_image" ], "__podman_quadlet_file": "/etc/containers/systemd/bogus.container", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:106 Monday 07 July 2025 20:33:55 -0400 (0:00:00.069) 0:00:15.358 *********** ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:113 Monday 07 July 2025 20:33:55 -0400 (0:00:00.035) 0:00:15.394 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:117 Monday 07 July 2025 20:33:55 -0400 (0:00:00.027) 0:00:15.421 *********** included: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Monday 07 July 2025 20:33:55 -0400 
(0:00:00.093) 0:00:15.514 *********** included: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Monday 07 July 2025 20:33:55 -0400 (0:00:00.050) 0:00:15.565 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Monday 07 July 2025 20:33:55 -0400 (0:00:00.027) 0:00:15.592 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Monday 07 July 2025 20:33:55 -0400 (0:00:00.027) 0:00:15.620 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Monday 07 July 2025 20:33:55 -0400 (0:00:00.029) 0:00:15.649 *********** skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Monday 07 July 2025 20:33:55 -0400 (0:00:00.025) 0:00:15.675 *********** ok: [managed-node1] => (item=None) => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Monday 07 July 2025 20:33:55 -0400 (0:00:00.577) 0:00:16.252 *********** ok: [managed-node1] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 67, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:50 Monday 07 July 2025 20:33:56 -0400 (0:00:00.375) 0:00:16.628 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_quadlet_file_src | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: 
/tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:62 Monday 07 July 2025 20:33:56 -0400 (0:00:00.033) 0:00:16.661 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_quadlet_str | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:75 Monday 07 July 2025 20:33:56 -0400 (0:00:00.033) 0:00:16.694 *********** changed: [managed-node1] => { "changed": true, "checksum": "1d087e679d135214e8ac9ccaf33b2222916efb7f", "dest": "/etc/containers/systemd/bogus.container", "gid": 0, "group": "root", "md5sum": "97480a9a73734d9f8007d2c06e7fed1f", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 138, "src": "/root/.ansible/tmp/ansible-tmp-1751934836.4283967-17279-9425111643274/.source.container", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:87 Monday 07 July 2025 20:33:57 -0400 (0:00:00.701) 0:00:17.395 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_activate_systemd_unit | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:115 Monday 07 July 2025 20:33:57 -0400 (0:00:00.034) 0:00:17.430 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_activate_systemd_unit | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:131 Monday 07 July 2025 20:33:57 -0400 (0:00:00.038) 0:00:17.469 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_activate_systemd_unit | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:198 Monday 07 July 2025 20:33:57 -0400 (0:00:00.039) 0:00:17.508 *********** skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:204 Monday 07 July 2025 20:33:57 -0400 (0:00:00.029) 0:00:17.538 *********** skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:213 Monday 07 July 2025 20:33:57 -0400 (0:00:00.027) 0:00:17.565 *********** skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was 
specified for this result", "changed": false } TASK [Verify image not pulled and no error] ************************************ task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:106 Monday 07 July 2025 20:33:57 -0400 (0:00:00.044) 0:00:17.610 *********** ok: [managed-node1] => { "changed": false } MSG: All assertions passed TASK [Cleanup] ***************************************************************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:113 Monday 07 July 2025 20:33:57 -0400 (0:00:00.077) 0:00:17.688 *********** included: fedora.linux_system_roles.podman for managed-node1 => (item=nopull) included: fedora.linux_system_roles.podman for managed-node1 => (item=bogus) TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Monday 07 July 2025 20:33:57 -0400 (0:00:00.162) 0:00:17.850 *********** included: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Monday 07 July 2025 20:33:57 -0400 (0:00:00.052) 0:00:17.902 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Monday 07 July 2025 20:33:57 -0400 (0:00:00.038) 0:00:17.941 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Monday 07 July 2025 20:33:57 -0400 (0:00:00.033) 0:00:17.974 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Monday 07 July 2025 20:33:57 -0400 (0:00:00.031) 0:00:18.006 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Monday 07 July 2025 20:33:57 -0400 (0:00:00.032) 0:00:18.038 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: 
/tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Monday 07 July 2025 20:33:57 -0400 (0:00:00.033) 0:00:18.071 *********** [WARNING]: TASK: fedora.linux_system_roles.podman : Set platform/version specific variables: The loop variable 'item' is already in use. You should set the `loop_var` value in the `loop_control` option for the task to something else to avoid variable collisions and unexpected behavior. ok: [managed-node1] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node1] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node1] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node1] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Monday 07 July 2025 20:33:57 -0400 (0:00:00.085) 0:00:18.157 *********** ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Monday 07 July 2025 20:33:58 -0400 (0:00:00.943) 0:00:19.100 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Monday 07 July 2025 20:33:58 -0400 (0:00:00.044) 0:00:19.145 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages)) | list | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Monday 07 July 2025 20:33:58 -0400 (0:00:00.038) 0:00:19.183 *********** skipping: [managed-node1] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Monday 
07 July 2025 20:33:58 -0400 (0:00:00.067) 0:00:19.251 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Monday 07 July 2025 20:33:58 -0400 (0:00:00.032) 0:00:19.283 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Monday 07 July 2025 20:33:59 -0400 (0:00:00.030) 0:00:19.313 *********** ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.023912", "end": "2025-07-07 20:33:59.325953", "rc": 0, "start": "2025-07-07 20:33:59.302041" } STDOUT: podman version 5.5.1 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Monday 07 July 2025 20:33:59 -0400 (0:00:00.392) 0:00:19.706 *********** ok: [managed-node1] => { "ansible_facts": { "podman_version": "5.5.1" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Monday 07 July 2025 20:33:59 -0400 (0:00:00.033) 0:00:19.740 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Monday 07 July 2025 20:33:59 -0400 (0:00:00.031) 0:00:19.771 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Monday 07 July 2025 20:33:59 -0400 (0:00:00.034) 0:00:19.805 *********** META: end_host conditional evaluated to False, continuing execution for managed-node1 skipping: [managed-node1] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node1" } MSG: end_host conditional evaluated to false, continuing execution for managed-node1 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Monday 07 July 2025 20:33:59 -0400 (0:00:00.036) 0:00:19.841 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package 
version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Monday 07 July 2025 20:33:59 -0400 (0:00:00.045) 0:00:19.886 *********** META: end_host conditional evaluated to False, continuing execution for managed-node1 skipping: [managed-node1] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node1" } MSG: end_host conditional evaluated to false, continuing execution for managed-node1 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Monday 07 July 2025 20:33:59 -0400 (0:00:00.046) 0:00:19.933 *********** included: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Monday 07 July 2025 20:33:59 -0400 (0:00:00.061) 0:00:19.994 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Monday 07 July 2025 20:33:59 -0400 (0:00:00.036) 0:00:20.031 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Monday 07 July 2025 20:33:59 -0400 (0:00:00.036) 0:00:20.067 *********** ok: [managed-node1] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Monday 07 July 2025 20:33:59 -0400 (0:00:00.042) 0:00:20.110 *********** ok: [managed-node1] => { "changed": false, "stat": { "atime": 1751934569.8134274, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "fa9845e044ad8d1bfcc68a2c8e62c8d83a1bb20e", "ctime": 1751934562.4243717, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 8668983, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1748217600.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15560, "uid": 0, "version": "1546733485", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: 
/tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Monday 07 July 2025 20:34:00 -0400 (0:00:00.380) 0:00:20.490 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Monday 07 July 2025 20:34:00 -0400 (0:00:00.072) 0:00:20.563 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Monday 07 July 2025 20:34:00 -0400 (0:00:00.033) 0:00:20.596 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Monday 07 July 2025 20:34:00 -0400 (0:00:00.034) 0:00:20.631 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Monday 07 July 2025 20:34:00 -0400 (0:00:00.032) 0:00:20.663 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Monday 07 July 2025 20:34:00 -0400 (0:00:00.031) 0:00:20.695 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Monday 07 July 2025 20:34:00 -0400 (0:00:00.031) 0:00:20.727 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Monday 07 July 2025 20:34:00 -0400 (0:00:00.032) 0:00:20.759 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: 
/tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Monday 07 July 2025 20:34:00 -0400 (0:00:00.031) 0:00:20.791 *********** ok: [managed-node1] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_parent_mode": "0755", "__podman_parent_path": "/etc/containers", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:126 Monday 07 July 2025 20:34:00 -0400 (0:00:00.041) 0:00:20.833 *********** included: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Monday 07 July 2025 20:34:00 -0400 (0:00:00.058) 0:00:20.891 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Monday 07 July 2025 20:34:00 -0400 (0:00:00.032) 0:00:20.923 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:129 Monday 07 July 2025 20:34:00 -0400 (0:00:00.031) 0:00:20.955 *********** included: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Monday 07 July 2025 20:34:00 -0400 (0:00:00.059) 0:00:21.014 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Monday 07 July 2025 20:34:00 -0400 (0:00:00.031) 0:00:21.046 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:132 Monday 07 July 2025 20:34:00 -0400 (0:00:00.032) 0:00:21.079 *********** included: 
/tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:7 Monday 07 July 2025 20:34:00 -0400 (0:00:00.061) 0:00:21.141 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:15 Monday 07 July 2025 20:34:00 -0400 (0:00:00.080) 0:00:21.222 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:135 Monday 07 July 2025 20:34:00 -0400 (0:00:00.031) 0:00:21.254 *********** included: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:8 Monday 07 July 2025 20:34:01 -0400 (0:00:00.062) 0:00:21.316 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:16 Monday 07 July 2025 20:34:01 -0400 (0:00:00.031) 0:00:21.347 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:21 Monday 07 July 2025 20:34:01 -0400 (0:00:00.031) 0:00:21.379 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:27 Monday 07 July 2025 20:34:01 -0400 (0:00:00.030) 0:00:21.410 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:141 Monday 07 July 2025 20:34:01 -0400 (0:00:00.031) 0:00:21.441 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_firewall | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage selinux for specified 
ports] ************************************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:148 Monday 07 July 2025 20:34:01 -0400 (0:00:00.031) 0:00:21.472 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:155 Monday 07 July 2025 20:34:01 -0400 (0:00:00.029) 0:00:21.502 *********** ok: [managed-node1] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:159 Monday 07 July 2025 20:34:01 -0400 (0:00:00.031) 0:00:21.533 *********** skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:168 Monday 07 July 2025 20:34:01 -0400 (0:00:00.026) 0:00:21.559 *********** skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:177 Monday 07 July 2025 20:34:01 -0400 (0:00:00.028) 0:00:21.588 *********** skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:184 Monday 07 July 2025 20:34:01 -0400 (0:00:00.027) 0:00:21.615 *********** skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:191 Monday 07 July 2025 20:34:01 -0400 (0:00:00.027) 0:00:21.643 *********** included: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Monday 07 July 2025 20:34:01 -0400 (0:00:00.074) 0:00:21.717 *********** ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: 
/tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Monday 07 July 2025 20:34:01 -0400 (0:00:00.042) 0:00:21.760 *********** ok: [managed-node1] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Monday 07 July 2025 20:34:01 -0400 (0:00:00.040) 0:00:21.801 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Monday 07 July 2025 20:34:01 -0400 (0:00:00.075) 0:00:21.876 *********** ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_name": "nopull", "__podman_quadlet_type": "container", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Monday 07 July 2025 20:34:01 -0400 (0:00:00.049) 0:00:21.926 *********** included: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Monday 07 July 2025 20:34:01 -0400 (0:00:00.059) 0:00:21.985 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Monday 07 July 2025 20:34:01 -0400 (0:00:00.035) 0:00:22.021 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Monday 07 July 2025 20:34:01 -0400 (0:00:00.037) 0:00:22.058 *********** ok: [managed-node1] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Monday 07 July 2025 20:34:01 -0400 (0:00:00.043) 0:00:22.102 *********** ok: [managed-node1] => { "changed": false, "stat": { "atime": 1751934569.8134274, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "fa9845e044ad8d1bfcc68a2c8e62c8d83a1bb20e", "ctime": 1751934562.4243717, "dev": 51714, "device_type": 
0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 8668983, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1748217600.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15560, "uid": 0, "version": "1546733485", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Monday 07 July 2025 20:34:02 -0400 (0:00:00.388) 0:00:22.490 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Monday 07 July 2025 20:34:02 -0400 (0:00:00.055) 0:00:22.546 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Monday 07 July 2025 20:34:02 -0400 (0:00:00.043) 0:00:22.589 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Monday 07 July 2025 20:34:02 -0400 (0:00:00.038) 0:00:22.628 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Monday 07 July 2025 20:34:02 -0400 (0:00:00.040) 0:00:22.669 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Monday 07 July 2025 20:34:02 -0400 (0:00:00.036) 0:00:22.706 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Monday 07 July 2025 20:34:02 -0400 (0:00:00.036) 0:00:22.743 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not 
__podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Monday 07 July 2025 20:34:02 -0400 (0:00:00.032) 0:00:22.775 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Monday 07 July 2025 20:34:02 -0400 (0:00:00.034) 0:00:22.809 *********** ok: [managed-node1] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "nopull.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Monday 07 July 2025 20:34:02 -0400 (0:00:00.051) 0:00:22.860 *********** ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Monday 07 July 2025 20:34:02 -0400 (0:00:00.034) 0:00:22.895 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:88 Monday 07 July 2025 20:34:02 -0400 (0:00:00.084) 0:00:22.979 *********** ok: [managed-node1] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/nopull.container", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:106 Monday 07 July 2025 20:34:02 -0400 (0:00:00.099) 0:00:23.079 *********** ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:113 Monday 07 July 2025 20:34:02 -0400 (0:00:00.045) 0:00:23.125 *********** included: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Monday 07 July 2025 20:34:02 -0400 (0:00:00.085) 0:00:23.210 *********** skipping: [managed-node1] 
=> { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Monday 07 July 2025 20:34:02 -0400 (0:00:00.043) 0:00:23.254 *********** ok: [managed-node1] => { "changed": false, "failed_when_result": false } MSG: Could not find the requested service nopull.service: host TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:34 Monday 07 July 2025 20:34:03 -0400 (0:00:00.775) 0:00:24.030 *********** ok: [managed-node1] => { "changed": false, "stat": { "atime": 1751934829.3562067, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "670d64fc68a9768edb20cad26df2acc703542d85", "ctime": 1751934829.3582067, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 641728709, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1751934828.9882042, "nlink": 1, "path": "/etc/containers/systemd/nopull.container", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 151, "uid": 0, "version": "1697896173", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:39 Monday 07 July 2025 20:34:04 -0400 (0:00:00.433) 0:00:24.464 *********** included: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Monday 07 July 2025 20:34:04 -0400 (0:00:00.095) 0:00:24.559 *********** ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Monday 07 July 2025 20:34:04 -0400 (0:00:00.516) 0:00:25.075 *********** fatal: [managed-node1]: FAILED! => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result" } TASK [Debug3] ****************************************************************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:270 Monday 07 July 2025 20:34:04 -0400 (0:00:00.037) 0:00:25.113 *********** fatal: [managed-node1]: FAILED! 
=> { "changed": false, "cmd": "set -x\nset -o pipefail\nexec 1>&2\n#podman volume rm --all\n#podman network prune -f\npodman volume ls\npodman network ls\npodman secret ls\npodman container ls\npodman pod ls\npodman images\nsystemctl list-units | grep quadlet\n", "delta": "0:00:00.162374", "end": "2025-07-07 20:34:05.266728", "rc": 1, "start": "2025-07-07 20:34:05.104354" } STDERR: + set -o pipefail + exec + podman volume ls DRIVER VOLUME NAME + podman network ls NETWORK ID NAME DRIVER 2f259bab93aa podman bridge 4adc89c3d61f podman-default-kube-network bridge + podman secret ls ID NAME DRIVER CREATED UPDATED + podman container ls CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES + podman pod ls POD ID NAME STATUS CREATED INFRA ID # OF CONTAINERS + podman images REPOSITORY TAG IMAGE ID CREATED SIZE quay.io/libpod/registry 2.8.2 0030ba3d620c 23 months ago 24.6 MB localhost:5000/libpod/testimage 20210610 9f9ec7f2fdef 4 years ago 7.99 MB quay.io/libpod/testimage 20210610 9f9ec7f2fdef 4 years ago 7.99 MB + systemctl list-units + grep quadlet MSG: non-zero return code TASK [Cleanup user] ************************************************************ task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:299 Monday 07 July 2025 20:34:05 -0400 (0:00:00.552) 0:00:25.665 *********** included: fedora.linux_system_roles.podman for managed-node1 TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Monday 07 July 2025 20:34:05 -0400 (0:00:00.123) 0:00:25.788 *********** included: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Monday 07 July 2025 20:34:05 -0400 (0:00:00.085) 0:00:25.874 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Monday 07 July 2025 20:34:05 -0400 (0:00:00.061) 0:00:25.935 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Monday 07 July 2025 20:34:05 -0400 (0:00:00.107) 0:00:26.043 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Monday 07 July 2025 20:34:05 -0400 (0:00:00.047) 0:00:26.091 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" 
} TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Monday 07 July 2025 20:34:05 -0400 (0:00:00.053) 0:00:26.144 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Monday 07 July 2025 20:34:05 -0400 (0:00:00.050) 0:00:26.195 *********** ok: [managed-node1] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node1] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node1] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node1] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Monday 07 July 2025 20:34:05 -0400 (0:00:00.110) 0:00:26.306 *********** ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Monday 07 July 2025 20:34:06 -0400 (0:00:00.960) 0:00:27.266 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Monday 07 July 2025 20:34:06 -0400 (0:00:00.031) 0:00:27.298 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages)) | list | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Monday 07 July 2025 20:34:07 -0400 (0:00:00.037) 0:00:27.336 *********** skipping: [managed-node1] => { 
"false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Monday 07 July 2025 20:34:07 -0400 (0:00:00.043) 0:00:27.379 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Monday 07 July 2025 20:34:07 -0400 (0:00:00.031) 0:00:27.410 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Monday 07 July 2025 20:34:07 -0400 (0:00:00.031) 0:00:27.442 *********** ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.025308", "end": "2025-07-07 20:34:07.456418", "rc": 0, "start": "2025-07-07 20:34:07.431110" } STDOUT: podman version 5.5.1 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Monday 07 July 2025 20:34:07 -0400 (0:00:00.393) 0:00:27.836 *********** ok: [managed-node1] => { "ansible_facts": { "podman_version": "5.5.1" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Monday 07 July 2025 20:34:07 -0400 (0:00:00.034) 0:00:27.870 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Monday 07 July 2025 20:34:07 -0400 (0:00:00.029) 0:00:27.900 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Monday 07 July 2025 20:34:07 -0400 (0:00:00.064) 0:00:27.964 *********** META: end_host conditional evaluated to False, continuing execution for managed-node1 skipping: [managed-node1] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node1" } MSG: end_host conditional evaluated to false, continuing execution for managed-node1 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Monday 07 July 2025 20:34:07 -0400 (0:00:00.102) 0:00:28.066 *********** skipping: [managed-node1] => { "changed": 
false, "false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Monday 07 July 2025 20:34:07 -0400 (0:00:00.061) 0:00:28.128 *********** META: end_host conditional evaluated to False, continuing execution for managed-node1 skipping: [managed-node1] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node1" } MSG: end_host conditional evaluated to false, continuing execution for managed-node1 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Monday 07 July 2025 20:34:07 -0400 (0:00:00.060) 0:00:28.188 *********** included: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Monday 07 July 2025 20:34:07 -0400 (0:00:00.060) 0:00:28.249 *********** ok: [managed-node1] => { "ansible_facts": { "getent_passwd": { "user_quadlet_basic": null } }, "changed": false } MSG: One or more supplied key could not be found in the database. TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Monday 07 July 2025 20:34:08 -0400 (0:00:00.383) 0:00:28.633 *********** fatal: [managed-node1]: FAILED! => { "changed": false } MSG: The given podman user [user_quadlet_basic] does not exist - cannot continue TASK [Dump journal] ************************************************************ task path: /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:336 Monday 07 July 2025 20:34:08 -0400 (0:00:00.039) 0:00:28.672 *********** fatal: [managed-node1]: FAILED! => { "changed": false, "cmd": [ "journalctl", "-ex" ], "delta": "0:00:00.037435", "end": "2025-07-07 20:34:08.697461", "failed_when_result": true, "rc": 0, "start": "2025-07-07 20:34:08.660026" } STDOUT: Jul 07 20:31:12 managed-node1 podman[33529]: 2025-07-07 20:31:12.683428814 -0400 EDT m=+0.105333506 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:12 managed-node1 auth_test_1_kube-auth_test_1_kube[33562]: This container is intended for podman CI testing Jul 07 20:31:12 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:12 managed-node1 podman[33570]: 2025-07-07 20:31:12.714820753 -0400 EDT m=+0.020442335 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:12 managed-node1 podman[33570]: 2025-07-07 20:31:12.727739741 -0400 EDT m=+0.033361322 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:12 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. ░░ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. ░░ ░░ The job identifier is 5293. Jul 07 20:31:12 managed-node1 podman[33570]: 2025-07-07 20:31:12.772309131 -0400 EDT m=+0.077930730 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:12 managed-node1 podman[33570]: 2025-07-07 20:31:12.774539798 -0400 EDT m=+0.080161434 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:12 managed-node1 auth_test_1_kube-auth_test_1_kube[33581]: This container is intended for podman CI testing Jul 07 20:31:12 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:12 managed-node1 conmon[33581]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events Jul 07 20:31:12 managed-node1 podman[33585]: 2025-07-07 20:31:12.80302532 -0400 EDT m=+0.019217516 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:12 managed-node1 podman[33585]: 2025-07-07 20:31:12.815878318 -0400 EDT m=+0.032070420 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:12 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. ░░ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. ░░ ░░ The job identifier is 5300.
Jul 07 20:31:12 managed-node1 podman[33585]: 2025-07-07 20:31:12.858892733 -0400 EDT m=+0.075084844 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:12 managed-node1 podman[33585]: 2025-07-07 20:31:12.861608858 -0400 EDT m=+0.077801000 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:12 managed-node1 auth_test_1_kube-auth_test_1_kube[33597]: This container is intended for podman CI testing Jul 07 20:31:12 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:12 managed-node1 conmon[33597]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events Jul 07 20:31:12 managed-node1 podman[33601]: 2025-07-07 20:31:12.894645384 -0400 EDT m=+0.019661678 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:12 managed-node1 podman[33601]: 2025-07-07 20:31:12.907221489 -0400 EDT m=+0.032237740 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:12 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.
░░ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. ░░ ░░ The job identifier is 5307. Jul 07 20:31:12 managed-node1 podman[33601]: 2025-07-07 20:31:12.964011098 -0400 EDT m=+0.089027366 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:12 managed-node1 podman[33601]: 2025-07-07 20:31:12.966891903 -0400 EDT m=+0.091908222 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:12 managed-node1 auth_test_1_kube-auth_test_1_kube[33612]: This container is intended for podman CI testing Jul 07 20:31:12 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:12 managed-node1 podman[33616]: 2025-07-07 20:31:12.998349231 -0400 EDT m=+0.019035510 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:13 managed-node1 podman[33616]: 2025-07-07 20:31:13.011136143 -0400 EDT m=+0.031822176 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:13 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.
░░ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. ░░ ░░ The job identifier is 5314. Jul 07 20:31:13 managed-node1 podman[33616]: 2025-07-07 20:31:13.061983984 -0400 EDT m=+0.082670077 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:13 managed-node1 podman[33616]: 2025-07-07 20:31:13.064744859 -0400 EDT m=+0.085430930 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:13 managed-node1 auth_test_1_kube-auth_test_1_kube[33628]: This container is intended for podman CI testing Jul 07 20:31:13 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:13 managed-node1 podman[33656]: 2025-07-07 20:31:13.110348205 -0400 EDT m=+0.035549767 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:13 managed-node1 podman[33656]: 2025-07-07 20:31:13.123449504 -0400 EDT m=+0.048650883 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:13 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.
░░ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. ░░ ░░ The job identifier is 5321. Jul 07 20:31:13 managed-node1 podman[33656]: 2025-07-07 20:31:13.172982151 -0400 EDT m=+0.098183549 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:13 managed-node1 podman[33656]: 2025-07-07 20:31:13.175131488 -0400 EDT m=+0.100332897 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:13 managed-node1 auth_test_1_kube-auth_test_1_kube[33721]: This container is intended for podman CI testing Jul 07 20:31:13 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:13 managed-node1 podman[33725]: 2025-07-07 20:31:13.219388663 -0400 EDT m=+0.030083614 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:13 managed-node1 podman[33725]: 2025-07-07 20:31:13.23314399 -0400 EDT m=+0.043839011 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:13 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.
░░ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. ░░ ░░ The job identifier is 5328. Jul 07 20:31:13 managed-node1 podman[33725]: 2025-07-07 20:31:13.309022917 -0400 EDT m=+0.119718030 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:13 managed-node1 auth_test_1_kube-auth_test_1_kube[33789]: This container is intended for podman CI testing Jul 07 20:31:13 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:13 managed-node1 podman[33725]: 2025-07-07 20:31:13.315211739 -0400 EDT m=+0.125906753 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:13 managed-node1 conmon[33789]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events Jul 07 20:31:13 managed-node1 podman[33793]: 2025-07-07 20:31:13.364116084 -0400 EDT m=+0.033666071 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:13 managed-node1 podman[33793]: 2025-07-07 20:31:13.379887466 -0400 EDT m=+0.049437393 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:13
managed-node1 python3.12[33786]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:31:13 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. ░░ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. ░░ ░░ The job identifier is 5335. Jul 07 20:31:13 managed-node1 podman[33793]: 2025-07-07 20:31:13.447336083 -0400 EDT m=+0.116886092 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:13 managed-node1 auth_test_1_kube-auth_test_1_kube[33805]: This container is intended for podman CI testing Jul 07 20:31:13 managed-node1 podman[33793]: 2025-07-07 20:31:13.451322866 -0400 EDT m=+0.120872788 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:13 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.
Jul 07 20:31:13 managed-node1 conmon[33805]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events Jul 07 20:31:13 managed-node1 podman[33815]: 2025-07-07 20:31:13.494306645 -0400 EDT m=+0.027771538 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:13 managed-node1 podman[33815]: 2025-07-07 20:31:13.507856915 -0400 EDT m=+0.041321733 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:13 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. ░░ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. ░░ ░░ The job identifier is 5342. Jul 07 20:31:13 managed-node1 podman[33815]: 2025-07-07 20:31:13.565306542 -0400 EDT m=+0.098771304 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:13 managed-node1 podman[33815]: 2025-07-07 20:31:13.568114494 -0400 EDT m=+0.101579151 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:13 managed-node1 auth_test_1_kube-auth_test_1_kube[33854]: This container is intended for podman CI testing Jul 07 20:31:13 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:13 managed-node1 podman[33858]: 2025-07-07 20:31:13.596989222 -0400 EDT m=+0.019144012 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:13 managed-node1 podman[33858]: 2025-07-07 20:31:13.609587102 -0400 EDT m=+0.031741858 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:13 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. ░░ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. ░░ ░░ The job identifier is 5349. Jul 07 20:31:13 managed-node1 podman[33858]: 2025-07-07 20:31:13.654974021 -0400 EDT m=+0.077128773 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:13 managed-node1 podman[33858]: 2025-07-07 20:31:13.657907701 -0400 EDT m=+0.080062528 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:13 managed-node1 auth_test_1_kube-auth_test_1_kube[33869]: This container is intended for podman CI testing Jul 07 20:31:13 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:13 managed-node1 podman[33873]: 2025-07-07 20:31:13.68697132 -0400 EDT m=+0.020528212 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:13 managed-node1 podman[33873]: 2025-07-07 20:31:13.699661283 -0400 EDT m=+0.033218130 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:13 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. ░░ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. ░░ ░░ The job identifier is 5356. Jul 07 20:31:13 managed-node1 podman[33873]: 2025-07-07 20:31:13.747651843 -0400 EDT m=+0.081208736 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:13 managed-node1 podman[33873]: 2025-07-07 20:31:13.74987183 -0400 EDT m=+0.083428708 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:13 managed-node1 auth_test_1_kube-auth_test_1_kube[33884]: This container is intended for podman CI testing Jul 07 20:31:13 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:13 managed-node1 conmon[33884]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events Jul 07 20:31:13 managed-node1 podman[33888]: 2025-07-07 20:31:13.781993925 -0400 EDT m=+0.018849586 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:13 managed-node1 podman[33888]: 2025-07-07 20:31:13.79513907 -0400 EDT m=+0.031994709 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:13 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. ░░ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. ░░ ░░ The job identifier is 5363.
Jul 07 20:31:13 managed-node1 podman[33888]: 2025-07-07 20:31:13.844893937 -0400 EDT m=+0.081749582 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:13 managed-node1 podman[33888]: 2025-07-07 20:31:13.847083733 -0400 EDT m=+0.083939386 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:13 managed-node1 auth_test_1_kube-auth_test_1_kube[33899]: This container is intended for podman CI testing Jul 07 20:31:13 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:13 managed-node1 podman[33903]: 2025-07-07 20:31:13.879757907 -0400 EDT m=+0.019444443 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:13 managed-node1 podman[33903]: 2025-07-07 20:31:13.892569335 -0400 EDT m=+0.032255795 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:13 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. ░░ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. ░░ ░░ The job identifier is 5370.
Jul 07 20:31:13 managed-node1 podman[33903]: 2025-07-07 20:31:13.941334631 -0400 EDT m=+0.081021075 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:13 managed-node1 podman[33903]: 2025-07-07 20:31:13.943528796 -0400 EDT m=+0.083215281 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:13 managed-node1 auth_test_1_kube-auth_test_1_kube[33915]: This container is intended for podman CI testing Jul 07 20:31:13 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:13 managed-node1 podman[33919]: 2025-07-07 20:31:13.974589798 -0400 EDT m=+0.018767491 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:13 managed-node1 podman[33919]: 2025-07-07 20:31:13.98711363 -0400 EDT m=+0.031291332 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:14 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. ░░ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. ░░ ░░ The job identifier is 5377.
Jul 07 20:31:14 managed-node1 podman[33919]: 2025-07-07 20:31:14.032832185 -0400 EDT m=+0.077009950 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:14 managed-node1 podman[33919]: 2025-07-07 20:31:14.035068935 -0400 EDT m=+0.079246661 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:14 managed-node1 auth_test_1_kube-auth_test_1_kube[33930]: This container is intended for podman CI testing Jul 07 20:31:14 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:14 managed-node1 podman[33934]: 2025-07-07 20:31:14.066569543 -0400 EDT m=+0.019394285 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:14 managed-node1 podman[33934]: 2025-07-07 20:31:14.079769619 -0400 EDT m=+0.032594319 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:14 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. ░░ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. ░░ ░░ The job identifier is 5384.
Jul 07 20:31:14 managed-node1 podman[33934]: 2025-07-07 20:31:14.12473653 -0400 EDT m=+0.077561363 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:14 managed-node1 podman[33934]: 2025-07-07 20:31:14.126932452 -0400 EDT m=+0.079757190 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:14 managed-node1 auth_test_1_kube-auth_test_1_kube[33945]: This container is intended for podman CI testing Jul 07 20:31:14 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:14 managed-node1 podman[33949]: 2025-07-07 20:31:14.155224261 -0400 EDT m=+0.018922277 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:14 managed-node1 podman[33949]: 2025-07-07 20:31:14.168612894 -0400 EDT m=+0.032311030 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:14 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. ░░ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. ░░ ░░ The job identifier is 5391.
Jul 07 20:31:14 managed-node1 podman[33949]: 2025-07-07 20:31:14.215220959 -0400 EDT m=+0.078919011 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:14 managed-node1 podman[33949]: 2025-07-07 20:31:14.218912531 -0400 EDT m=+0.082610550 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:14 managed-node1 auth_test_1_kube-auth_test_1_kube[33960]: This container is intended for podman CI testing Jul 07 20:31:14 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:14 managed-node1 conmon[33960]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events Jul 07 20:31:14 managed-node1 podman[33964]: 2025-07-07 20:31:14.265654687 -0400 EDT m=+0.032124857 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:14 managed-node1 podman[33964]: 2025-07-07 20:31:14.279861874 -0400 EDT m=+0.046332005 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:14 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.
░░ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. ░░ ░░ The job identifier is 5398. Jul 07 20:31:14 managed-node1 podman[33964]: 2025-07-07 20:31:14.334875201 -0400 EDT m=+0.101345567 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:14 managed-node1 auth_test_1_kube-auth_test_1_kube[34029]: This container is intended for podman CI testing Jul 07 20:31:14 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:14 managed-node1 podman[33964]: 2025-07-07 20:31:14.34018797 -0400 EDT m=+0.106658397 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:14 managed-node1 conmon[34029]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events Jul 07 20:31:14 managed-node1 podman[34055]: 2025-07-07 20:31:14.376429756 -0400 EDT m=+0.023451628 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:14 managed-node1 podman[34055]: 2025-07-07 20:31:14.395117482 -0400 EDT m=+0.042139287 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:14
managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. ░░ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. ░░ ░░ The job identifier is 5405. Jul 07 20:31:14 managed-node1 podman[34055]: 2025-07-07 20:31:14.450271735 -0400 EDT m=+0.097293739 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:14 managed-node1 auth_test_1_kube-auth_test_1_kube[34110]: This container is intended for podman CI testing Jul 07 20:31:14 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:14 managed-node1 podman[34055]: 2025-07-07 20:31:14.456393784 -0400 EDT m=+0.103415526 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:14 managed-node1 podman[34125]: 2025-07-07 20:31:14.501062804 -0400 EDT m=+0.032413735 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:14 managed-node1 podman[34125]: 2025-07-07 20:31:14.518461254 -0400 EDT m=+0.049812015 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:14 managed-node1 python3.12[34123]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:31:14
managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. ░░ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. ░░ ░░ The job identifier is 5412. Jul 07 20:31:14 managed-node1 podman[34125]: 2025-07-07 20:31:14.630447885 -0400 EDT m=+0.161798906 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:14 managed-node1 auth_test_1_kube-auth_test_1_kube[34136]: This container is intended for podman CI testing Jul 07 20:31:14 managed-node1 podman[34125]: 2025-07-07 20:31:14.635762544 -0400 EDT m=+0.167113303 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:14 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.
Jul 07 20:31:14 managed-node1 conmon[34136]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events Jul 07 20:31:14 managed-node1 podman[34142]: 2025-07-07 20:31:14.679832358 -0400 EDT m=+0.033244283 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:14 managed-node1 podman[34142]: 2025-07-07 20:31:14.692438145 -0400 EDT m=+0.045850011 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:14 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. ░░ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. ░░ ░░ The job identifier is 5419. Jul 07 20:31:14 managed-node1 podman[34142]: 2025-07-07 20:31:14.74450265 -0400 EDT m=+0.097914575 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:14 managed-node1 auth_test_1_kube-auth_test_1_kube[34178]: This container is intended for podman CI testing Jul 07 20:31:14 managed-node1 podman[34142]: 2025-07-07 20:31:14.746958703 -0400 EDT m=+0.100370582 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:14 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:14 managed-node1 podman[34182]: 2025-07-07 20:31:14.776198354 -0400 EDT m=+0.019494985 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:14 managed-node1 podman[34182]: 2025-07-07 20:31:14.788756224 -0400 EDT m=+0.032052812 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:14 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. ░░ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. ░░ ░░ The job identifier is 5426. Jul 07 20:31:14 managed-node1 podman[34182]: 2025-07-07 20:31:14.836923153 -0400 EDT m=+0.080219738 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:14 managed-node1 podman[34182]: 2025-07-07 20:31:14.839183071 -0400 EDT m=+0.082479859 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:14 managed-node1 auth_test_1_kube-auth_test_1_kube[34193]: This container is intended for podman CI testing Jul 07 20:31:14 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:14 managed-node1 podman[34197]: 2025-07-07 20:31:14.870828401 -0400 EDT m=+0.019244246 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:14 managed-node1 podman[34197]: 2025-07-07 20:31:14.883452864 -0400 EDT m=+0.031868683 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:14 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. ░░ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. ░░ ░░ The job identifier is 5433. Jul 07 20:31:14 managed-node1 podman[34197]: 2025-07-07 20:31:14.93358477 -0400 EDT m=+0.082000607 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:14 managed-node1 podman[34197]: 2025-07-07 20:31:14.935834195 -0400 EDT m=+0.084250078 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:14 managed-node1 auth_test_1_kube-auth_test_1_kube[34208]: This container is intended for podman CI testing Jul 07 20:31:14 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:14 managed-node1 podman[34212]: 2025-07-07 20:31:14.965314012 -0400 EDT m=+0.020484512 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:14 managed-node1 podman[34212]: 2025-07-07 20:31:14.977884857 -0400 EDT m=+0.033055350 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:15 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. ░░ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. ░░ ░░ The job identifier is 5440. Jul 07 20:31:15 managed-node1 podman[34212]: 2025-07-07 20:31:15.026957421 -0400 EDT m=+0.082127960 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:15 managed-node1 podman[34212]: 2025-07-07 20:31:15.029574061 -0400 EDT m=+0.084744570 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:15 managed-node1 auth_test_1_kube-auth_test_1_kube[34224]: This container is intended for podman CI testing Jul 07 20:31:15 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:15 managed-node1 podman[34228]: 2025-07-07 20:31:15.061604477 -0400 EDT m=+0.019250272 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:15 managed-node1 podman[34228]: 2025-07-07 20:31:15.074116308 -0400 EDT m=+0.031762079 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:15 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. ░░ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. ░░ ░░ The job identifier is 5447. Jul 07 20:31:15 managed-node1 podman[34228]: 2025-07-07 20:31:15.126266773 -0400 EDT m=+0.083912530 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:15 managed-node1 podman[34228]: 2025-07-07 20:31:15.128457455 -0400 EDT m=+0.086103265 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:15 managed-node1 auth_test_1_kube-auth_test_1_kube[34239]: This container is intended for podman CI testing Jul 07 20:31:15 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:15 managed-node1 podman[34243]: 2025-07-07 20:31:15.160290079 -0400 EDT m=+0.019739730 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:15 managed-node1 podman[34243]: 2025-07-07 20:31:15.173005861 -0400 EDT m=+0.032455438 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:15 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. ░░ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. ░░ ░░ The job identifier is 5454. Jul 07 20:31:15 managed-node1 podman[34243]: 2025-07-07 20:31:15.224078649 -0400 EDT m=+0.083528292 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:15 managed-node1 auth_test_1_kube-auth_test_1_kube[34255]: This container is intended for podman CI testing Jul 07 20:31:15 managed-node1 podman[34243]: 2025-07-07 20:31:15.226519624 -0400 EDT m=+0.085969316 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:15 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:15 managed-node1 podman[34259]: 2025-07-07 20:31:15.258364625 -0400 EDT m=+0.020012982 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:15 managed-node1 podman[34259]: 2025-07-07 20:31:15.271668865 -0400 EDT m=+0.033317190 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:15 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. ░░ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. ░░ ░░ The job identifier is 5461. Jul 07 20:31:15 managed-node1 podman[34259]: 2025-07-07 20:31:15.313951727 -0400 EDT m=+0.075599997 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:15 managed-node1 podman[34259]: 2025-07-07 20:31:15.316172882 -0400 EDT m=+0.077821247 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:15 managed-node1 auth_test_1_kube-auth_test_1_kube[34270]: This container is intended for podman CI testing Jul 07 20:31:15 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.
â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:15 managed-node1 podman[34274]: 2025-07-07 20:31:15.344612573 -0400 EDT m=+0.019589464 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:15 managed-node1 podman[34274]: 2025-07-07 20:31:15.357001121 -0400 EDT m=+0.031977981 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:15 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5468. Jul 07 20:31:15 managed-node1 podman[34274]: 2025-07-07 20:31:15.404767874 -0400 EDT m=+0.079744848 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:15 managed-node1 podman[34274]: 2025-07-07 20:31:15.406968578 -0400 EDT m=+0.081945501 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:15 managed-node1 auth_test_1_kube-auth_test_1_kube[34285]: This container is intended for podman CI testing Jul 07 20:31:15 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. 
â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:15 managed-node1 podman[34289]: 2025-07-07 20:31:15.435334159 -0400 EDT m=+0.019393882 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:15 managed-node1 podman[34289]: 2025-07-07 20:31:15.448792134 -0400 EDT m=+0.032851696 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:15 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5475. Jul 07 20:31:15 managed-node1 podman[34289]: 2025-07-07 20:31:15.497735523 -0400 EDT m=+0.081795097 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:15 managed-node1 podman[34289]: 2025-07-07 20:31:15.500052412 -0400 EDT m=+0.084111958 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:15 managed-node1 auth_test_1_kube-auth_test_1_kube[34300]: This container is intended for podman CI testing Jul 07 20:31:15 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. 
â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:15 managed-node1 podman[34304]: 2025-07-07 20:31:15.531670941 -0400 EDT m=+0.019281505 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:15 managed-node1 podman[34304]: 2025-07-07 20:31:15.54408611 -0400 EDT m=+0.031696642 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:15 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5482. Jul 07 20:31:15 managed-node1 podman[34304]: 2025-07-07 20:31:15.592327547 -0400 EDT m=+0.079938093 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:15 managed-node1 podman[34304]: 2025-07-07 20:31:15.594549246 -0400 EDT m=+0.082159810 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:15 managed-node1 auth_test_1_kube-auth_test_1_kube[34316]: This container is intended for podman CI testing Jul 07 20:31:15 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. 
â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:15 managed-node1 podman[34320]: 2025-07-07 20:31:15.625754289 -0400 EDT m=+0.019330333 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:15 managed-node1 podman[34320]: 2025-07-07 20:31:15.638955363 -0400 EDT m=+0.032531274 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:15 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5489. Jul 07 20:31:15 managed-node1 podman[34320]: 2025-07-07 20:31:15.683628604 -0400 EDT m=+0.077204606 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:15 managed-node1 podman[34320]: 2025-07-07 20:31:15.686024497 -0400 EDT m=+0.079600478 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:15 managed-node1 auth_test_1_kube-auth_test_1_kube[34332]: This container is intended for podman CI testing Jul 07 20:31:15 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. 
â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:15 managed-node1 podman[34336]: 2025-07-07 20:31:15.719468906 -0400 EDT m=+0.020475172 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:15 managed-node1 podman[34336]: 2025-07-07 20:31:15.732296519 -0400 EDT m=+0.033302770 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:15 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5496. Jul 07 20:31:15 managed-node1 podman[34336]: 2025-07-07 20:31:15.783618488 -0400 EDT m=+0.084624797 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:15 managed-node1 podman[34336]: 2025-07-07 20:31:15.786287122 -0400 EDT m=+0.087293426 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:15 managed-node1 auth_test_1_kube-auth_test_1_kube[34347]: This container is intended for podman CI testing Jul 07 20:31:15 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. 
â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:15 managed-node1 conmon[34347]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events Jul 07 20:31:15 managed-node1 podman[34351]: 2025-07-07 20:31:15.818032646 -0400 EDT m=+0.019738574 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:15 managed-node1 podman[34351]: 2025-07-07 20:31:15.830825849 -0400 EDT m=+0.032531760 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:15 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5503. 
Jul 07 20:31:15 managed-node1 podman[34351]: 2025-07-07 20:31:15.873881148 -0400 EDT m=+0.075587030 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:15 managed-node1 podman[34351]: 2025-07-07 20:31:15.876142251 -0400 EDT m=+0.077848220 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:15 managed-node1 auth_test_1_kube-auth_test_1_kube[34362]: This container is intended for podman CI testing Jul 07 20:31:15 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:15 managed-node1 conmon[34362]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events Jul 07 20:31:15 managed-node1 podman[34366]: 2025-07-07 20:31:15.905032665 -0400 EDT m=+0.019851791 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:15 managed-node1 podman[34366]: 2025-07-07 20:31:15.917579888 -0400 EDT m=+0.032399005 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:15 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5510. Jul 07 20:31:15 managed-node1 podman[34366]: 2025-07-07 20:31:15.963523299 -0400 EDT m=+0.078342439 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:15 managed-node1 podman[34366]: 2025-07-07 20:31:15.965793476 -0400 EDT m=+0.080612642 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:15 managed-node1 auth_test_1_kube-auth_test_1_kube[34378]: This container is intended for podman CI testing Jul 07 20:31:15 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. 
Jul 07 20:31:15 managed-node1 conmon[34378]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events Jul 07 20:31:15 managed-node1 podman[34382]: 2025-07-07 20:31:15.994098465 -0400 EDT m=+0.018663496 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:16 managed-node1 podman[34382]: 2025-07-07 20:31:16.006855796 -0400 EDT m=+0.031420807 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:16 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5517. Jul 07 20:31:16 managed-node1 podman[34382]: 2025-07-07 20:31:16.051920432 -0400 EDT m=+0.076485439 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:16 managed-node1 podman[34382]: 2025-07-07 20:31:16.054144472 -0400 EDT m=+0.078709526 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:16 managed-node1 auth_test_1_kube-auth_test_1_kube[34393]: This container is intended for podman CI testing Jul 07 20:31:16 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. 
â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:16 managed-node1 conmon[34393]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events Jul 07 20:31:16 managed-node1 podman[34397]: 2025-07-07 20:31:16.087613293 -0400 EDT m=+0.020828534 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:16 managed-node1 podman[34397]: 2025-07-07 20:31:16.10016147 -0400 EDT m=+0.033376682 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:16 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5524. 
Jul 07 20:31:16 managed-node1 podman[34397]: 2025-07-07 20:31:16.151131621 -0400 EDT m=+0.084346910 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:16 managed-node1 podman[34397]: 2025-07-07 20:31:16.153850184 -0400 EDT m=+0.087065443 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:16 managed-node1 auth_test_1_kube-auth_test_1_kube[34409]: This container is intended for podman CI testing Jul 07 20:31:16 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:16 managed-node1 conmon[34409]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events Jul 07 20:31:16 managed-node1 podman[34413]: 2025-07-07 20:31:16.184001697 -0400 EDT m=+0.019611777 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:16 managed-node1 podman[34413]: 2025-07-07 20:31:16.196698451 -0400 EDT m=+0.032308515 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:16 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5531. Jul 07 20:31:16 managed-node1 podman[34413]: 2025-07-07 20:31:16.242174277 -0400 EDT m=+0.077784338 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:16 managed-node1 podman[34413]: 2025-07-07 20:31:16.244405166 -0400 EDT m=+0.080015426 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:16 managed-node1 auth_test_1_kube-auth_test_1_kube[34424]: This container is intended for podman CI testing Jul 07 20:31:16 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. 
Jul 07 20:31:16 managed-node1 conmon[34424]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events Jul 07 20:31:16 managed-node1 podman[34428]: 2025-07-07 20:31:16.274633397 -0400 EDT m=+0.019295849 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:16 managed-node1 podman[34428]: 2025-07-07 20:31:16.287296957 -0400 EDT m=+0.031959384 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:16 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5538. Jul 07 20:31:16 managed-node1 podman[34428]: 2025-07-07 20:31:16.336618852 -0400 EDT m=+0.081281359 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:16 managed-node1 podman[34428]: 2025-07-07 20:31:16.338866161 -0400 EDT m=+0.083528626 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:16 managed-node1 auth_test_1_kube-auth_test_1_kube[34439]: This container is intended for podman CI testing Jul 07 20:31:16 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. 
â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:16 managed-node1 podman[34443]: 2025-07-07 20:31:16.371476668 -0400 EDT m=+0.019018051 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:16 managed-node1 podman[34443]: 2025-07-07 20:31:16.382957645 -0400 EDT m=+0.030498984 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:16 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5545. Jul 07 20:31:16 managed-node1 podman[34443]: 2025-07-07 20:31:16.426945682 -0400 EDT m=+0.074487039 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:16 managed-node1 podman[34443]: 2025-07-07 20:31:16.429176173 -0400 EDT m=+0.076717563 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:16 managed-node1 auth_test_1_kube-auth_test_1_kube[34454]: This container is intended for podman CI testing Jul 07 20:31:16 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. 
â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:16 managed-node1 conmon[34454]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events Jul 07 20:31:16 managed-node1 podman[34458]: 2025-07-07 20:31:16.462935007 -0400 EDT m=+0.021082930 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:16 managed-node1 podman[34458]: 2025-07-07 20:31:16.475166055 -0400 EDT m=+0.033313981 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:16 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5552. 
Jul 07 20:31:16 managed-node1 podman[34458]: 2025-07-07 20:31:16.527868549 -0400 EDT m=+0.086016469 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:16 managed-node1 podman[34458]: 2025-07-07 20:31:16.530602277 -0400 EDT m=+0.088750254 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:16 managed-node1 auth_test_1_kube-auth_test_1_kube[34469]: This container is intended for podman CI testing Jul 07 20:31:16 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:16 managed-node1 podman[34473]: 2025-07-07 20:31:16.563116714 -0400 EDT m=+0.019184411 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:16 managed-node1 podman[34473]: 2025-07-07 20:31:16.575253939 -0400 EDT m=+0.031321623 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:16 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5559. 
Jul 07 20:31:16 managed-node1 podman[34473]: 2025-07-07 20:31:16.630976001 -0400 EDT m=+0.087043705 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:16 managed-node1 podman[34473]: 2025-07-07 20:31:16.633675896 -0400 EDT m=+0.089743616 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:16 managed-node1 auth_test_1_kube-auth_test_1_kube[34485]: This container is intended for podman CI testing Jul 07 20:31:16 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:16 managed-node1 podman[34489]: 2025-07-07 20:31:16.666341559 -0400 EDT m=+0.019125869 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:16 managed-node1 podman[34489]: 2025-07-07 20:31:16.678990158 -0400 EDT m=+0.031774458 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:16 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5566. 
Jul 07 20:31:16 managed-node1 podman[34489]: 2025-07-07 20:31:16.72243168 -0400 EDT m=+0.075216031 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:16 managed-node1 podman[34489]: 2025-07-07 20:31:16.725180691 -0400 EDT m=+0.077965012 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:16 managed-node1 auth_test_1_kube-auth_test_1_kube[34500]: This container is intended for podman CI testing Jul 07 20:31:16 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:16 managed-node1 podman[34504]: 2025-07-07 20:31:16.754814029 -0400 EDT m=+0.021391737 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:16 managed-node1 podman[34504]: 2025-07-07 20:31:16.767687927 -0400 EDT m=+0.034265655 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:16 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5573. 
Jul 07 20:31:16 managed-node1 podman[34504]: 2025-07-07 20:31:16.813137127 -0400 EDT m=+0.079714972 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:16 managed-node1 podman[34504]: 2025-07-07 20:31:16.815411872 -0400 EDT m=+0.081989653 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:16 managed-node1 auth_test_1_kube-auth_test_1_kube[34515]: This container is intended for podman CI testing Jul 07 20:31:16 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:16 managed-node1 conmon[34515]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events Jul 07 20:31:16 managed-node1 podman[34519]: 2025-07-07 20:31:16.849132759 -0400 EDT m=+0.018817320 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:16 managed-node1 podman[34519]: 2025-07-07 20:31:16.862312923 -0400 EDT m=+0.031997574 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:16 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5580. Jul 07 20:31:16 managed-node1 podman[34519]: 2025-07-07 20:31:16.906674999 -0400 EDT m=+0.076359616 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:16 managed-node1 podman[34519]: 2025-07-07 20:31:16.909880486 -0400 EDT m=+0.079565172 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:16 managed-node1 auth_test_1_kube-auth_test_1_kube[34530]: This container is intended for podman CI testing Jul 07 20:31:16 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:16 managed-node1 podman[34534]: 2025-07-07 20:31:16.95844908 -0400 EDT m=+0.033673257 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:16 managed-node1 podman[34534]: 2025-07-07 20:31:16.971595578 -0400 EDT m=+0.046819804 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:17 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5587. Jul 07 20:31:17 managed-node1 podman[34534]: 2025-07-07 20:31:17.025056202 -0400 EDT m=+0.100280598 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:17 managed-node1 podman[34534]: 2025-07-07 20:31:17.029457616 -0400 EDT m=+0.104681754 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:17 managed-node1 auth_test_1_kube-auth_test_1_kube[34599]: This container is intended for podman CI testing Jul 07 20:31:17 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:17 managed-node1 podman[34627]: 2025-07-07 20:31:17.070393858 -0400 EDT m=+0.025970516 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:17 managed-node1 podman[34627]: 2025-07-07 20:31:17.084512102 -0400 EDT m=+0.040088565 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:17 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5594. Jul 07 20:31:17 managed-node1 podman[34627]: 2025-07-07 20:31:17.177102708 -0400 EDT m=+0.132679404 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:17 managed-node1 auth_test_1_kube-auth_test_1_kube[34683]: This container is intended for podman CI testing Jul 07 20:31:17 managed-node1 podman[34627]: 2025-07-07 20:31:17.180894161 -0400 EDT m=+0.136470572 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:17 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:17 managed-node1 podman[34695]: 2025-07-07 20:31:17.231847651 -0400 EDT m=+0.034297314 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:17 managed-node1 podman[34695]: 2025-07-07 20:31:17.246740629 -0400 EDT m=+0.049190236 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:17 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5601. Jul 07 20:31:17 managed-node1 python3.12[34693]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:31:17 managed-node1 podman[34695]: 2025-07-07 20:31:17.30599944 -0400 EDT m=+0.108449088 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:17 managed-node1 podman[34695]: 2025-07-07 20:31:17.308938661 -0400 EDT m=+0.111388407 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:17 managed-node1 auth_test_1_kube-auth_test_1_kube[34706]: This container is intended for podman CI testing Jul 07 20:31:17 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. 
Jul 07 20:31:17 managed-node1 conmon[34706]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events Jul 07 20:31:17 managed-node1 podman[34713]: 2025-07-07 20:31:17.350459916 -0400 EDT m=+0.024235490 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:17 managed-node1 podman[34713]: 2025-07-07 20:31:17.365971326 -0400 EDT m=+0.039746708 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:17 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5608. Jul 07 20:31:17 managed-node1 podman[34713]: 2025-07-07 20:31:17.415348107 -0400 EDT m=+0.089123488 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:17 managed-node1 podman[34713]: 2025-07-07 20:31:17.417592199 -0400 EDT m=+0.091367661 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:17 managed-node1 auth_test_1_kube-auth_test_1_kube[34750]: This container is intended for podman CI testing Jul 07 20:31:17 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. 
â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:17 managed-node1 podman[34754]: 2025-07-07 20:31:17.448087244 -0400 EDT m=+0.021608245 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:17 managed-node1 podman[34754]: 2025-07-07 20:31:17.460699437 -0400 EDT m=+0.034220411 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:17 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5615. Jul 07 20:31:17 managed-node1 podman[34754]: 2025-07-07 20:31:17.506809745 -0400 EDT m=+0.080330782 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:17 managed-node1 podman[34754]: 2025-07-07 20:31:17.509127066 -0400 EDT m=+0.082648093 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:17 managed-node1 auth_test_1_kube-auth_test_1_kube[34765]: This container is intended for podman CI testing Jul 07 20:31:17 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. 
â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:17 managed-node1 podman[34769]: 2025-07-07 20:31:17.542481404 -0400 EDT m=+0.021358978 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:17 managed-node1 podman[34769]: 2025-07-07 20:31:17.5570775 -0400 EDT m=+0.035954986 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:17 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5622. Jul 07 20:31:17 managed-node1 podman[34769]: 2025-07-07 20:31:17.60711597 -0400 EDT m=+0.085993501 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:17 managed-node1 podman[34769]: 2025-07-07 20:31:17.609755396 -0400 EDT m=+0.088632916 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:17 managed-node1 auth_test_1_kube-auth_test_1_kube[34780]: This container is intended for podman CI testing Jul 07 20:31:17 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. 
â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:17 managed-node1 podman[34784]: 2025-07-07 20:31:17.640012378 -0400 EDT m=+0.021140746 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:17 managed-node1 podman[34784]: 2025-07-07 20:31:17.652490475 -0400 EDT m=+0.033618838 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:17 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5629. Jul 07 20:31:17 managed-node1 podman[34784]: 2025-07-07 20:31:17.70011701 -0400 EDT m=+0.081245479 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:17 managed-node1 auth_test_1_kube-auth_test_1_kube[34795]: This container is intended for podman CI testing Jul 07 20:31:17 managed-node1 podman[34784]: 2025-07-07 20:31:17.703762734 -0400 EDT m=+0.084891179 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:17 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. 
â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:17 managed-node1 podman[34799]: 2025-07-07 20:31:17.736074614 -0400 EDT m=+0.020056195 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:17 managed-node1 podman[34799]: 2025-07-07 20:31:17.748527888 -0400 EDT m=+0.032509382 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:17 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5636. Jul 07 20:31:17 managed-node1 podman[34799]: 2025-07-07 20:31:17.79543748 -0400 EDT m=+0.079419067 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:17 managed-node1 podman[34799]: 2025-07-07 20:31:17.798751634 -0400 EDT m=+0.082733139 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:17 managed-node1 auth_test_1_kube-auth_test_1_kube[34810]: This container is intended for podman CI testing Jul 07 20:31:17 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. 
â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:17 managed-node1 conmon[34810]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events Jul 07 20:31:17 managed-node1 podman[34814]: 2025-07-07 20:31:17.831629085 -0400 EDT m=+0.020108874 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:17 managed-node1 podman[34814]: 2025-07-07 20:31:17.844326128 -0400 EDT m=+0.032805907 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:17 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5643. 
Jul 07 20:31:17 managed-node1 podman[34814]: 2025-07-07 20:31:17.898323544 -0400 EDT m=+0.086803336 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:17 managed-node1 podman[34814]: 2025-07-07 20:31:17.900673692 -0400 EDT m=+0.089153423 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:17 managed-node1 auth_test_1_kube-auth_test_1_kube[34825]: This container is intended for podman CI testing Jul 07 20:31:17 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:17 managed-node1 podman[34829]: 2025-07-07 20:31:17.931521246 -0400 EDT m=+0.021848806 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:17 managed-node1 podman[34829]: 2025-07-07 20:31:17.94406602 -0400 EDT m=+0.034393554 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:17 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5650. 
Jul 07 20:31:17 managed-node1 podman[34829]: 2025-07-07 20:31:17.996305996 -0400 EDT m=+0.086633529 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:17 managed-node1 podman[34829]: 2025-07-07 20:31:17.998742848 -0400 EDT m=+0.089070425 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:17 managed-node1 auth_test_1_kube-auth_test_1_kube[34841]: This container is intended for podman CI testing Jul 07 20:31:18 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:18 managed-node1 podman[34845]: 2025-07-07 20:31:18.029185879 -0400 EDT m=+0.019114003 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:18 managed-node1 podman[34845]: 2025-07-07 20:31:18.042521069 -0400 EDT m=+0.032449143 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:18 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5657. 
Jul 07 20:31:18 managed-node1 podman[34845]: 2025-07-07 20:31:18.092988624 -0400 EDT m=+0.082916705 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:18 managed-node1 podman[34845]: 2025-07-07 20:31:18.095313174 -0400 EDT m=+0.085241278 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:18 managed-node1 auth_test_1_kube-auth_test_1_kube[34857]: This container is intended for podman CI testing Jul 07 20:31:18 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:18 managed-node1 podman[34861]: 2025-07-07 20:31:18.129668866 -0400 EDT m=+0.021224565 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:18 managed-node1 podman[34861]: 2025-07-07 20:31:18.142326085 -0400 EDT m=+0.033881760 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:18 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5664. 
Jul 07 20:31:18 managed-node1 podman[34861]: 2025-07-07 20:31:18.19279771 -0400 EDT m=+0.084353472 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:18 managed-node1 podman[34861]: 2025-07-07 20:31:18.195648063 -0400 EDT m=+0.087203915 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:18 managed-node1 auth_test_1_kube-auth_test_1_kube[34872]: This container is intended for podman CI testing Jul 07 20:31:18 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:18 managed-node1 podman[34876]: 2025-07-07 20:31:18.229439645 -0400 EDT m=+0.020253492 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:18 managed-node1 podman[34876]: 2025-07-07 20:31:18.24218817 -0400 EDT m=+0.033002023 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:18 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5671. 
Jul 07 20:31:18 managed-node1 podman[34876]: 2025-07-07 20:31:18.29392756 -0400 EDT m=+0.084741430 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:18 managed-node1 podman[34876]: 2025-07-07 20:31:18.296287803 -0400 EDT m=+0.087101686 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:18 managed-node1 auth_test_1_kube-auth_test_1_kube[34888]: This container is intended for podman CI testing Jul 07 20:31:18 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:18 managed-node1 podman[34892]: 2025-07-07 20:31:18.32955635 -0400 EDT m=+0.020182773 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:18 managed-node1 podman[34892]: 2025-07-07 20:31:18.342477137 -0400 EDT m=+0.033103451 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:18 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5678. 
Jul 07 20:31:18 managed-node1 podman[34892]: 2025-07-07 20:31:18.394419816 -0400 EDT m=+0.085046197 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:18 managed-node1 podman[34892]: 2025-07-07 20:31:18.397243808 -0400 EDT m=+0.087870271 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:18 managed-node1 auth_test_1_kube-auth_test_1_kube[34903]: This container is intended for podman CI testing Jul 07 20:31:18 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:18 managed-node1 podman[34907]: 2025-07-07 20:31:18.429250615 -0400 EDT m=+0.019856460 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:18 managed-node1 podman[34907]: 2025-07-07 20:31:18.441783606 -0400 EDT m=+0.032389406 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:18 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5685. 
Jul 07 20:31:18 managed-node1 podman[34907]: 2025-07-07 20:31:18.499091104 -0400 EDT m=+0.089697198 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:18 managed-node1 podman[34907]: 2025-07-07 20:31:18.503340104 -0400 EDT m=+0.093946062 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:18 managed-node1 auth_test_1_kube-auth_test_1_kube[34943]: This container is intended for podman CI testing Jul 07 20:31:18 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:18 managed-node1 podman[34964]: 2025-07-07 20:31:18.543763048 -0400 EDT m=+0.023752219 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:18 managed-node1 podman[34964]: 2025-07-07 20:31:18.560167521 -0400 EDT m=+0.040156547 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:18 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5692. 
Jul 07 20:31:18 managed-node1 podman[34964]: 2025-07-07 20:31:18.612046282 -0400 EDT m=+0.092035672 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:18 managed-node1 auth_test_1_kube-auth_test_1_kube[35012]: This container is intended for podman CI testing Jul 07 20:31:18 managed-node1 podman[34964]: 2025-07-07 20:31:18.615806989 -0400 EDT m=+0.095796050 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:18 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:18 managed-node1 conmon[35012]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events Jul 07 20:31:18 managed-node1 podman[35041]: 2025-07-07 20:31:18.668478401 -0400 EDT m=+0.035538417 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:18 managed-node1 podman[35041]: 2025-07-07 20:31:18.683517085 -0400 EDT m=+0.050577052 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:18 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5699. Jul 07 20:31:18 managed-node1 podman[35041]: 2025-07-07 20:31:18.763957069 -0400 EDT m=+0.131017381 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:18 managed-node1 auth_test_1_kube-auth_test_1_kube[35079]: This container is intended for podman CI testing Jul 07 20:31:18 managed-node1 podman[35041]: 2025-07-07 20:31:18.767955059 -0400 EDT m=+0.135015017 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:18 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:18 managed-node1 podman[35085]: 2025-07-07 20:31:18.820556221 -0400 EDT m=+0.036984989 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:18 managed-node1 podman[35085]: 2025-07-07 20:31:18.836884195 -0400 EDT m=+0.053313095 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:18 managed-node1 python3.12[35074]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:31:18 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5706. Jul 07 20:31:18 managed-node1 podman[35085]: 2025-07-07 20:31:18.897084147 -0400 EDT m=+0.113513334 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:18 managed-node1 auth_test_1_kube-auth_test_1_kube[35098]: This container is intended for podman CI testing Jul 07 20:31:18 managed-node1 podman[35085]: 2025-07-07 20:31:18.901223207 -0400 EDT m=+0.117651974 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:18 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. 
Jul 07 20:31:18 managed-node1 conmon[35098]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events Jul 07 20:31:18 managed-node1 podman[35123]: 2025-07-07 20:31:18.935007612 -0400 EDT m=+0.019974481 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:18 managed-node1 podman[35123]: 2025-07-07 20:31:18.947889427 -0400 EDT m=+0.032856264 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:18 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5713. Jul 07 20:31:18 managed-node1 podman[35123]: 2025-07-07 20:31:18.991577845 -0400 EDT m=+0.076544712 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:18 managed-node1 podman[35123]: 2025-07-07 20:31:18.993849057 -0400 EDT m=+0.078815939 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:18 managed-node1 auth_test_1_kube-auth_test_1_kube[35138]: This container is intended for podman CI testing Jul 07 20:31:18 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. 
â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:18 managed-node1 conmon[35138]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events Jul 07 20:31:19 managed-node1 podman[35143]: 2025-07-07 20:31:19.023419114 -0400 EDT m=+0.019988935 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:19 managed-node1 podman[35143]: 2025-07-07 20:31:19.036118694 -0400 EDT m=+0.032688485 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:19 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5720. 
Jul 07 20:31:19 managed-node1 podman[35143]: 2025-07-07 20:31:19.087366897 -0400 EDT m=+0.083936778 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:19 managed-node1 podman[35143]: 2025-07-07 20:31:19.089680483 -0400 EDT m=+0.086250306 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:19 managed-node1 auth_test_1_kube-auth_test_1_kube[35154]: This container is intended for podman CI testing Jul 07 20:31:19 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:19 managed-node1 podman[35158]: 2025-07-07 20:31:19.122637177 -0400 EDT m=+0.020242589 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:19 managed-node1 podman[35158]: 2025-07-07 20:31:19.135301853 -0400 EDT m=+0.032907248 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:19 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5727. 
Jul 07 20:31:19 managed-node1 podman[35158]: 2025-07-07 20:31:19.186419369 -0400 EDT m=+0.084024763 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:19 managed-node1 podman[35158]: 2025-07-07 20:31:19.188698909 -0400 EDT m=+0.086304571 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:19 managed-node1 auth_test_1_kube-auth_test_1_kube[35170]: This container is intended for podman CI testing Jul 07 20:31:19 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:19 managed-node1 podman[35174]: 2025-07-07 20:31:19.217758606 -0400 EDT m=+0.020317035 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:19 managed-node1 podman[35174]: 2025-07-07 20:31:19.230886804 -0400 EDT m=+0.033445173 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:19 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5734. 
Jul 07 20:31:19 managed-node1 podman[35174]: 2025-07-07 20:31:19.275277786 -0400 EDT m=+0.077836183 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:19 managed-node1 podman[35174]: 2025-07-07 20:31:19.277533735 -0400 EDT m=+0.080092190 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:19 managed-node1 auth_test_1_kube-auth_test_1_kube[35185]: This container is intended for podman CI testing Jul 07 20:31:19 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:19 managed-node1 conmon[35185]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events Jul 07 20:31:19 managed-node1 podman[35190]: 2025-07-07 20:31:19.309991866 -0400 EDT m=+0.019488519 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:19 managed-node1 podman[35190]: 2025-07-07 20:31:19.322658146 -0400 EDT m=+0.032154753 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:19 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5741. Jul 07 20:31:19 managed-node1 podman[35190]: 2025-07-07 20:31:19.370610614 -0400 EDT m=+0.080107346 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:19 managed-node1 podman[35190]: 2025-07-07 20:31:19.372876842 -0400 EDT m=+0.082373476 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:19 managed-node1 auth_test_1_kube-auth_test_1_kube[35201]: This container is intended for podman CI testing Jul 07 20:31:19 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:19 managed-node1 podman[35205]: 2025-07-07 20:31:19.404869858 -0400 EDT m=+0.019126064 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:19 managed-node1 podman[35205]: 2025-07-07 20:31:19.417976636 -0400 EDT m=+0.032232823 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:19 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5748. Jul 07 20:31:19 managed-node1 podman[35205]: 2025-07-07 20:31:19.470131778 -0400 EDT m=+0.084388043 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:19 managed-node1 podman[35205]: 2025-07-07 20:31:19.472376286 -0400 EDT m=+0.086632540 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:19 managed-node1 auth_test_1_kube-auth_test_1_kube[35216]: This container is intended for podman CI testing Jul 07 20:31:19 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:19 managed-node1 podman[35220]: 2025-07-07 20:31:19.503576586 -0400 EDT m=+0.019196560 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:19 managed-node1 podman[35220]: 2025-07-07 20:31:19.516745509 -0400 EDT m=+0.032365490 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:19 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5755. Jul 07 20:31:19 managed-node1 podman[35220]: 2025-07-07 20:31:19.564932147 -0400 EDT m=+0.080552114 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:19 managed-node1 podman[35220]: 2025-07-07 20:31:19.567622797 -0400 EDT m=+0.083242787 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:19 managed-node1 auth_test_1_kube-auth_test_1_kube[35232]: This container is intended for podman CI testing Jul 07 20:31:19 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:19 managed-node1 podman[35236]: 2025-07-07 20:31:19.59907771 -0400 EDT m=+0.019603882 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:19 managed-node1 podman[35236]: 2025-07-07 20:31:19.611520721 -0400 EDT m=+0.032046839 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:19 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5762. Jul 07 20:31:19 managed-node1 podman[35236]: 2025-07-07 20:31:19.65544326 -0400 EDT m=+0.075969379 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:19 managed-node1 podman[35236]: 2025-07-07 20:31:19.658256486 -0400 EDT m=+0.078782651 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:19 managed-node1 auth_test_1_kube-auth_test_1_kube[35248]: This container is intended for podman CI testing Jul 07 20:31:19 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:19 managed-node1 podman[35252]: 2025-07-07 20:31:19.6872466 -0400 EDT m=+0.019976252 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:19 managed-node1 podman[35252]: 2025-07-07 20:31:19.699843173 -0400 EDT m=+0.032572770 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:19 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5769. Jul 07 20:31:19 managed-node1 podman[35252]: 2025-07-07 20:31:19.748941031 -0400 EDT m=+0.081670642 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:19 managed-node1 podman[35252]: 2025-07-07 20:31:19.751143098 -0400 EDT m=+0.083872738 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:19 managed-node1 auth_test_1_kube-auth_test_1_kube[35263]: This container is intended for podman CI testing Jul 07 20:31:19 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:19 managed-node1 podman[35267]: 2025-07-07 20:31:19.780255667 -0400 EDT m=+0.019815074 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:19 managed-node1 podman[35267]: 2025-07-07 20:31:19.793478385 -0400 EDT m=+0.033037771 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:19 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5776. Jul 07 20:31:19 managed-node1 podman[35267]: 2025-07-07 20:31:19.850378106 -0400 EDT m=+0.089937499 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:19 managed-node1 podman[35267]: 2025-07-07 20:31:19.852611996 -0400 EDT m=+0.092171444 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:19 managed-node1 auth_test_1_kube-auth_test_1_kube[35279]: This container is intended for podman CI testing Jul 07 20:31:19 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:19 managed-node1 podman[35283]: 2025-07-07 20:31:19.881451267 -0400 EDT m=+0.019768672 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:19 managed-node1 podman[35283]: 2025-07-07 20:31:19.894469235 -0400 EDT m=+0.032786593 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:19 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5783. Jul 07 20:31:19 managed-node1 podman[35283]: 2025-07-07 20:31:19.947127502 -0400 EDT m=+0.085444912 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:19 managed-node1 auth_test_1_kube-auth_test_1_kube[35295]: This container is intended for podman CI testing Jul 07 20:31:19 managed-node1 podman[35283]: 2025-07-07 20:31:19.949981816 -0400 EDT m=+0.088299203 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:19 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:19 managed-node1 podman[35299]: 2025-07-07 20:31:19.977641859 -0400 EDT m=+0.019122198 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:19 managed-node1 podman[35299]: 2025-07-07 20:31:19.989990665 -0400 EDT m=+0.031470962 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:20 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5790. Jul 07 20:31:20 managed-node1 podman[35299]: 2025-07-07 20:31:20.03595698 -0400 EDT m=+0.077437358 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:20 managed-node1 auth_test_1_kube-auth_test_1_kube[35310]: This container is intended for podman CI testing Jul 07 20:31:20 managed-node1 podman[35299]: 2025-07-07 20:31:20.038750281 -0400 EDT m=+0.080230711 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:20 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:20 managed-node1 podman[35314]: 2025-07-07 20:31:20.069430734 -0400 EDT m=+0.019157221 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:20 managed-node1 podman[35314]: 2025-07-07 20:31:20.082223442 -0400 EDT m=+0.031949882 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:20 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5797. Jul 07 20:31:20 managed-node1 podman[35314]: 2025-07-07 20:31:20.125758126 -0400 EDT m=+0.075484648 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:20 managed-node1 podman[35314]: 2025-07-07 20:31:20.127991237 -0400 EDT m=+0.077717738 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:20 managed-node1 auth_test_1_kube-auth_test_1_kube[35325]: This container is intended for podman CI testing Jul 07 20:31:20 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:20 managed-node1 podman[35329]: 2025-07-07 20:31:20.173287508 -0400 EDT m=+0.031612849 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:20 managed-node1 podman[35329]: 2025-07-07 20:31:20.186746552 -0400 EDT m=+0.045071956 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:20 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5804. Jul 07 20:31:20 managed-node1 podman[35329]: 2025-07-07 20:31:20.23913445 -0400 EDT m=+0.097459723 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:20 managed-node1 podman[35329]: 2025-07-07 20:31:20.244356296 -0400 EDT m=+0.102681531 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:20 managed-node1 auth_test_1_kube-auth_test_1_kube[35396]: This container is intended for podman CI testing Jul 07 20:31:20 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:20 managed-node1 podman[35424]: 2025-07-07 20:31:20.287762482 -0400 EDT m=+0.028376300 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:20 managed-node1 podman[35424]: 2025-07-07 20:31:20.30096729 -0400 EDT m=+0.041580947 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:20 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5811. Jul 07 20:31:20 managed-node1 podman[35424]: 2025-07-07 20:31:20.391073306 -0400 EDT m=+0.131687162 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:20 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:20 managed-node1 conmon[35487]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events Jul 07 20:31:20 managed-node1 auth_test_1_kube-auth_test_1_kube[35487]: This container is intended for podman CI testing Jul 07 20:31:20 managed-node1 podman[35424]: 2025-07-07 20:31:20.397762343 -0400 EDT m=+0.138376159 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:20 managed-node1 podman[35492]: 2025-07-07 20:31:20.447438415 -0400 EDT m=+0.034439229 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:20 managed-node1 podman[35492]: 2025-07-07 20:31:20.461905103 -0400 EDT m=+0.048905869 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:20 
managed-node1 python3.12[35489]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:31:20 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5818. Jul 07 20:31:20 managed-node1 podman[35492]: 2025-07-07 20:31:20.533894439 -0400 EDT m=+0.120895390 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:20 managed-node1 podman[35492]: 2025-07-07 20:31:20.537755393 -0400 EDT m=+0.124756070 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:20 managed-node1 auth_test_1_kube-auth_test_1_kube[35503]: This container is intended for podman CI testing Jul 07 20:31:20 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. 
Jul 07 20:31:20 managed-node1 conmon[35503]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events Jul 07 20:31:20 managed-node1 podman[35510]: 2025-07-07 20:31:20.584899484 -0400 EDT m=+0.036017052 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:20 managed-node1 podman[35510]: 2025-07-07 20:31:20.599680844 -0400 EDT m=+0.050798057 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:20 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5825. Jul 07 20:31:20 managed-node1 podman[35510]: 2025-07-07 20:31:20.65090971 -0400 EDT m=+0.102027154 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:20 managed-node1 auth_test_1_kube-auth_test_1_kube[35546]: This container is intended for podman CI testing Jul 07 20:31:20 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. 
Jul 07 20:31:20 managed-node1 podman[35510]: 2025-07-07 20:31:20.655609593 -0400 EDT m=+0.106726924 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:20 managed-node1 podman[35550]: 2025-07-07 20:31:20.686911258 -0400 EDT m=+0.019577458 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:20 managed-node1 podman[35550]: 2025-07-07 20:31:20.6980022 -0400 EDT m=+0.030668224 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:20 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5832. 
Jul 07 20:31:20 managed-node1 podman[35550]: 2025-07-07 20:31:20.740675424 -0400 EDT m=+0.073341499 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:20 managed-node1 podman[35550]: 2025-07-07 20:31:20.743551454 -0400 EDT m=+0.076217500 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:20 managed-node1 auth_test_1_kube-auth_test_1_kube[35562]: This container is intended for podman CI testing Jul 07 20:31:20 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:20 managed-node1 podman[35566]: 2025-07-07 20:31:20.777841216 -0400 EDT m=+0.019250596 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:20 managed-node1 podman[35566]: 2025-07-07 20:31:20.790073477 -0400 EDT m=+0.031482829 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:20 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5839. 
Jul 07 20:31:20 managed-node1 podman[35566]: 2025-07-07 20:31:20.835978316 -0400 EDT m=+0.077387778 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:20 managed-node1 podman[35566]: 2025-07-07 20:31:20.838343884 -0400 EDT m=+0.079753324 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:20 managed-node1 auth_test_1_kube-auth_test_1_kube[35577]: This container is intended for podman CI testing Jul 07 20:31:20 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:20 managed-node1 podman[35581]: 2025-07-07 20:31:20.871022031 -0400 EDT m=+0.020388383 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:20 managed-node1 podman[35581]: 2025-07-07 20:31:20.883687135 -0400 EDT m=+0.033053449 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:20 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5846. 
Jul 07 20:31:20 managed-node1 podman[35581]: 2025-07-07 20:31:20.927067348 -0400 EDT m=+0.076433717 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:20 managed-node1 podman[35581]: 2025-07-07 20:31:20.929368311 -0400 EDT m=+0.078734664 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:20 managed-node1 auth_test_1_kube-auth_test_1_kube[35592]: This container is intended for podman CI testing Jul 07 20:31:20 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:20 managed-node1 podman[35596]: 2025-07-07 20:31:20.959533941 -0400 EDT m=+0.021159359 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:20 managed-node1 podman[35596]: 2025-07-07 20:31:20.972214012 -0400 EDT m=+0.033839418 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:21 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5853. 
Jul 07 20:31:21 managed-node1 podman[35596]: 2025-07-07 20:31:21.030229346 -0400 EDT m=+0.091854758 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:21 managed-node1 podman[35596]: 2025-07-07 20:31:21.032531933 -0400 EDT m=+0.094157437 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:21 managed-node1 auth_test_1_kube-auth_test_1_kube[35607]: This container is intended for podman CI testing Jul 07 20:31:21 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:21 managed-node1 podman[35611]: 2025-07-07 20:31:21.063369026 -0400 EDT m=+0.019165461 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:21 managed-node1 podman[35611]: 2025-07-07 20:31:21.075787463 -0400 EDT m=+0.031583907 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:21 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5860. 
Jul 07 20:31:21 managed-node1 podman[35611]: 2025-07-07 20:31:21.119252494 -0400 EDT m=+0.075048953 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:21 managed-node1 podman[35611]: 2025-07-07 20:31:21.121516588 -0400 EDT m=+0.077313068 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:21 managed-node1 auth_test_1_kube-auth_test_1_kube[35624]: This container is intended for podman CI testing Jul 07 20:31:21 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:21 managed-node1 podman[35628]: 2025-07-07 20:31:21.155387475 -0400 EDT m=+0.020243992 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:21 managed-node1 podman[35628]: 2025-07-07 20:31:21.167967405 -0400 EDT m=+0.032823945 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:21 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5867. 
Jul 07 20:31:21 managed-node1 podman[35628]: 2025-07-07 20:31:21.21209413 -0400 EDT m=+0.076950725 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:21 managed-node1 podman[35628]: 2025-07-07 20:31:21.21496234 -0400 EDT m=+0.079818903 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:21 managed-node1 auth_test_1_kube-auth_test_1_kube[35639]: This container is intended for podman CI testing Jul 07 20:31:21 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:21 managed-node1 conmon[35639]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events Jul 07 20:31:21 managed-node1 podman[35643]: 2025-07-07 20:31:21.247600124 -0400 EDT m=+0.019629048 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:21 managed-node1 podman[35643]: 2025-07-07 20:31:21.260246588 -0400 EDT m=+0.032275488 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:21 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5874. Jul 07 20:31:21 managed-node1 podman[35643]: 2025-07-07 20:31:21.303265254 -0400 EDT m=+0.075294202 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:21 managed-node1 podman[35643]: 2025-07-07 20:31:21.305512693 -0400 EDT m=+0.077541623 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:21 managed-node1 auth_test_1_kube-auth_test_1_kube[35655]: This container is intended for podman CI testing Jul 07 20:31:21 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:21 managed-node1 podman[35659]: 2025-07-07 20:31:21.338665243 -0400 EDT m=+0.020317934 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:21 managed-node1 podman[35659]: 2025-07-07 20:31:21.351272249 -0400 EDT m=+0.032924968 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:21 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5881. Jul 07 20:31:21 managed-node1 podman[35659]: 2025-07-07 20:31:21.401104004 -0400 EDT m=+0.082757238 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:21 managed-node1 podman[35659]: 2025-07-07 20:31:21.40542619 -0400 EDT m=+0.087078930 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:21 managed-node1 auth_test_1_kube-auth_test_1_kube[35671]: This container is intended for podman CI testing Jul 07 20:31:21 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:21 managed-node1 podman[35699]: 2025-07-07 20:31:21.453660664 -0400 EDT m=+0.035257593 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:21 managed-node1 podman[35699]: 2025-07-07 20:31:21.467879499 -0400 EDT m=+0.049476440 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:21 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5888. Jul 07 20:31:21 managed-node1 podman[35699]: 2025-07-07 20:31:21.524192051 -0400 EDT m=+0.105789182 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:21 managed-node1 podman[35699]: 2025-07-07 20:31:21.52952801 -0400 EDT m=+0.111125008 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:21 managed-node1 auth_test_1_kube-auth_test_1_kube[35764]: This container is intended for podman CI testing Jul 07 20:31:21 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:21 managed-node1 podman[35788]: 2025-07-07 20:31:21.568220445 -0400 EDT m=+0.024930216 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:21 managed-node1 podman[35788]: 2025-07-07 20:31:21.582776916 -0400 EDT m=+0.039486887 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:21 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5895. Jul 07 20:31:21 managed-node1 podman[35788]: 2025-07-07 20:31:21.659098135 -0400 EDT m=+0.115808088 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:21 managed-node1 auth_test_1_kube-auth_test_1_kube[35831]: This container is intended for podman CI testing Jul 07 20:31:21 managed-node1 podman[35788]: 2025-07-07 20:31:21.662979243 -0400 EDT m=+0.119689160 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:21 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:21 managed-node1 podman[35835]: 2025-07-07 20:31:21.712431425 -0400 EDT m=+0.032457509 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:21 managed-node1 podman[35835]: 2025-07-07 20:31:21.727152622 -0400 EDT m=+0.047178689 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:21 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5902. Jul 07 20:31:21 managed-node1 python3.12[35829]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:31:21 managed-node1 podman[35835]: 2025-07-07 20:31:21.787507833 -0400 EDT m=+0.107533916 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:21 managed-node1 auth_test_1_kube-auth_test_1_kube[35846]: This container is intended for podman CI testing Jul 07 20:31:21 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. 
Jul 07 20:31:21 managed-node1 podman[35835]: 2025-07-07 20:31:21.793169961 -0400 EDT m=+0.113195967 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:21 managed-node1 podman[35851]: 2025-07-07 20:31:21.827312399 -0400 EDT m=+0.024335220 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:21 managed-node1 podman[35851]: 2025-07-07 20:31:21.84144561 -0400 EDT m=+0.038468441 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:21 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5909. 
Jul 07 20:31:21 managed-node1 podman[35851]: 2025-07-07 20:31:21.89141785 -0400 EDT m=+0.088440682 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:21 managed-node1 podman[35851]: 2025-07-07 20:31:21.89363808 -0400 EDT m=+0.090660834 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:21 managed-node1 auth_test_1_kube-auth_test_1_kube[35887]: This container is intended for podman CI testing Jul 07 20:31:21 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:21 managed-node1 podman[35891]: 2025-07-07 20:31:21.923398352 -0400 EDT m=+0.021182183 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:21 managed-node1 podman[35891]: 2025-07-07 20:31:21.936001816 -0400 EDT m=+0.033785637 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:21 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5916. 
Jul 07 20:31:21 managed-node1 podman[35891]: 2025-07-07 20:31:21.987441604 -0400 EDT m=+0.085225415 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:21 managed-node1 podman[35891]: 2025-07-07 20:31:21.990929649 -0400 EDT m=+0.088713552 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:21 managed-node1 auth_test_1_kube-auth_test_1_kube[35902]: This container is intended for podman CI testing Jul 07 20:31:21 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:22 managed-node1 podman[35906]: 2025-07-07 20:31:22.0218562 -0400 EDT m=+0.017984372 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:22 managed-node1 podman[35906]: 2025-07-07 20:31:22.034844415 -0400 EDT m=+0.030972531 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:22 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5923. 
Jul 07 20:31:22 managed-node1 podman[35906]: 2025-07-07 20:31:22.091025046 -0400 EDT m=+0.087153210 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:22 managed-node1 podman[35906]: 2025-07-07 20:31:22.094799693 -0400 EDT m=+0.090928157 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:22 managed-node1 auth_test_1_kube-auth_test_1_kube[35931]: This container is intended for podman CI testing Jul 07 20:31:22 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:22 managed-node1 podman[35954]: 2025-07-07 20:31:22.139027999 -0400 EDT m=+0.028672465 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:22 managed-node1 podman[35954]: 2025-07-07 20:31:22.154020141 -0400 EDT m=+0.043664742 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:22 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5930. 
Jul 07 20:31:22 managed-node1 podman[35954]: 2025-07-07 20:31:22.206647976 -0400 EDT m=+0.096292424 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:22 managed-node1 podman[35954]: 2025-07-07 20:31:22.209597636 -0400 EDT m=+0.099241976 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:22 managed-node1 auth_test_1_kube-auth_test_1_kube[36011]: This container is intended for podman CI testing Jul 07 20:31:22 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:22 managed-node1 podman[36040]: 2025-07-07 20:31:22.259693095 -0400 EDT m=+0.034188439 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:22 managed-node1 podman[36040]: 2025-07-07 20:31:22.269510422 -0400 EDT m=+0.044005342 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:22 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5937. 
Jul 07 20:31:22 managed-node1 podman[36040]: 2025-07-07 20:31:22.329965247 -0400 EDT m=+0.104460206 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:22 managed-node1 auth_test_1_kube-auth_test_1_kube[36079]: This container is intended for podman CI testing Jul 07 20:31:22 managed-node1 podman[36040]: 2025-07-07 20:31:22.334452532 -0400 EDT m=+0.108947421 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:22 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:22 managed-node1 conmon[36079]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events Jul 07 20:31:22 managed-node1 podman[36083]: 2025-07-07 20:31:22.384871133 -0400 EDT m=+0.032627544 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:22 managed-node1 podman[36083]: 2025-07-07 20:31:22.401248506 -0400 EDT m=+0.049004881 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:22 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.
░░ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. ░░ ░░ The job identifier is 5944. Jul 07 20:31:22 managed-node1 podman[36083]: 2025-07-07 20:31:22.521351163 -0400 EDT m=+0.169107630 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:22 managed-node1 podman[36083]: 2025-07-07 20:31:22.527989504 -0400 EDT m=+0.175745785 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:22 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.
Jul 07 20:31:22 managed-node1 auth_test_1_kube-auth_test_1_kube[36094]: This container is intended for podman CI testing Jul 07 20:31:22 managed-node1 conmon[36094]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events Jul 07 20:31:22 managed-node1 podman[36098]: 2025-07-07 20:31:22.583976206 -0400 EDT m=+0.037081633 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:22 managed-node1 python3.12[36073]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None Jul 07 20:31:22 managed-node1 podman[36098]: 2025-07-07 20:31:22.59764438 -0400 EDT m=+0.050750034 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:22 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. ░░ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. ░░ ░░ The job identifier is 5951. Jul 07 20:31:22 managed-node1 systemd[1]: Reload requested from client PID 36110 ('systemctl') (unit session-5.scope)... Jul 07 20:31:22 managed-node1 systemd[1]: Reloading...
Jul 07 20:31:22 managed-node1 podman[36098]: 2025-07-07 20:31:22.659692815 -0400 EDT m=+0.112798231 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:31:22 managed-node1 podman[36098]: 2025-07-07 20:31:22.666902581 -0400 EDT m=+0.120007940 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:31:22 managed-node1 auth_test_1_kube-auth_test_1_kube[36112]: This container is intended for podman CI testing Jul 07 20:31:22 managed-node1 podman[36134]: 2025-07-07 20:31:22.737682873 -0400 EDT m=+0.047915242 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:22 managed-node1 podman[36134]: 2025-07-07 20:31:22.752454858 -0400 EDT m=+0.062687129 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:22 managed-node1 systemd-rc-local-generator[36160]: /etc/rc.d/rc.local is not marked executable, skipping. Jul 07 20:31:22 managed-node1 systemd[1]: Reloading finished in 243 ms. Jul 07 20:31:22 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:22 managed-node1 systemd[1]: Stopping podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service - A template for running K8s workloads via podman-kube-play... ░░ Subject: A stop job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service has begun execution.
░░ ░░ The job identifier is 5958. Jul 07 20:31:22 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container. ░░ Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully. ░░ ░░ The job identifier is 5959. Jul 07 20:31:22 managed-node1 podman[36134]: 2025-07-07 20:31:22.993835809 -0400 EDT m=+0.304068216 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:22 managed-node1 auth_test_1_kube-auth_test_1_kube[36185]: This container is intended for podman CI testing Jul 07 20:31:23 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state. Jul 07 20:31:23 managed-node1 conmon[36185]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events Jul 07 20:31:23 managed-node1 podman[36134]: 2025-07-07 20:31:23.002373113 -0400 EDT m=+0.312605386 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:31:23 managed-node1 podman[36183]: 2025-07-07 20:31:23.012615451 -0400 EDT m=+0.039513819 pod stop 8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd (image=, name=auth_test_1_kube) Jul 07 20:31:23 managed-node1 podman[36183]: 2025-07-07 20:31:23.013929002 -0400 EDT m=+0.040827806 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:23 managed-node1 systemd[1]: var-lib-containers-storage-overlay-343675d1c7962339170c5823c97d41383140939d7ff4380f29012cdee14c2784-merged.mount: Deactivated successfully.
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-343675d1c7962339170c5823c97d41383140939d7ff4380f29012cdee14c2784-merged.mount has successfully entered the 'dead' state. Jul 07 20:31:23 managed-node1 podman[36183]: 2025-07-07 20:31:23.058780023 -0400 EDT m=+0.085678303 container cleanup 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:31:23 managed-node1 systemd[1]: libpod-62f9d4997b7b62b31bc6de8fed554fd83031d2fc9306d738f5c6b7ca27523eba.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-62f9d4997b7b62b31bc6de8fed554fd83031d2fc9306d738f5c6b7ca27523eba.scope has successfully entered the 'dead' state. Jul 07 20:31:23 managed-node1 podman[36183]: 2025-07-07 20:31:23.078759519 -0400 EDT m=+0.105657943 container died 62f9d4997b7b62b31bc6de8fed554fd83031d2fc9306d738f5c6b7ca27523eba (image=, name=8f7847ede084-infra, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:23 managed-node1 systemd[1]: run-p20191-i20491.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-p20191-i20491.scope has successfully entered the 'dead' state. Jul 07 20:31:23 managed-node1 kernel: podman1: port 1(veth1) entered disabled state Jul 07 20:31:23 managed-node1 kernel: veth1 (unregistering): left allmulticast mode Jul 07 20:31:23 managed-node1 kernel: veth1 (unregistering): left promiscuous mode Jul 07 20:31:23 managed-node1 kernel: podman1: port 1(veth1) entered disabled state Jul 07 20:31:23 managed-node1 NetworkManager[716]: [1751934683.1218] device (podman1): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Jul 07 20:31:23 managed-node1 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 5966. Jul 07 20:31:23 managed-node1 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 5966. Jul 07 20:31:23 managed-node1 systemd[1]: run-netns-netns\x2d71306de9\x2df283\x2db61f\x2d4a69\x2db69d27093002.mount: Deactivated successfully.
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2d71306de9\x2df283\x2db61f\x2d4a69\x2db69d27093002.mount has successfully entered the 'dead' state. Jul 07 20:31:23 managed-node1 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-62f9d4997b7b62b31bc6de8fed554fd83031d2fc9306d738f5c6b7ca27523eba-rootfs-merge.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-62f9d4997b7b62b31bc6de8fed554fd83031d2fc9306d738f5c6b7ca27523eba-rootfs-merge.mount has successfully entered the 'dead' state. Jul 07 20:31:23 managed-node1 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-62f9d4997b7b62b31bc6de8fed554fd83031d2fc9306d738f5c6b7ca27523eba-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-62f9d4997b7b62b31bc6de8fed554fd83031d2fc9306d738f5c6b7ca27523eba-userdata-shm.mount has successfully entered the 'dead' state. Jul 07 20:31:23 managed-node1 podman[36183]: 2025-07-07 20:31:23.183498632 -0400 EDT m=+0.210396942 container cleanup 62f9d4997b7b62b31bc6de8fed554fd83031d2fc9306d738f5c6b7ca27523eba (image=, name=8f7847ede084-infra, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:23 managed-node1 systemd[1]: Removed slice machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice - cgroup machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice. ░░ Subject: A stop job for unit machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice has finished. ░░ ░░ The job identifier is 6045 and the job result is done. Jul 07 20:31:23 managed-node1 systemd[1]: machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice: Consumed 1.857s CPU time, 1M memory peak. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice completed and consumed the indicated resources.
Jul 07 20:31:23 managed-node1 podman[36183]: 2025-07-07 20:31:23.212685884 -0400 EDT m=+0.239584198 container remove 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:31:23 managed-node1 podman[36183]: 2025-07-07 20:31:23.238925424 -0400 EDT m=+0.265823745 container remove 62f9d4997b7b62b31bc6de8fed554fd83031d2fc9306d738f5c6b7ca27523eba (image=, name=8f7847ede084-infra, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:23 managed-node1 systemd[1]: machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice: Failed to open /run/systemd/transient/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice: No such file or directory Jul 07 20:31:23 managed-node1 podman[36183]: 2025-07-07 20:31:23.24814718 -0400 EDT m=+0.275045457 pod remove 8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd (image=, name=auth_test_1_kube) Jul 07 20:31:23 managed-node1 podman[36183]: 2025-07-07 20:31:23.251227715 -0400 EDT m=+0.278126263 container kill c378f913cea4ae57a9dcec0c838333f43982d213f895ef3f3c6af3dab5cfecc9 (image=, name=3ef6fcac6278-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:23 managed-node1 systemd[1]: libpod-c378f913cea4ae57a9dcec0c838333f43982d213f895ef3f3c6af3dab5cfecc9.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-c378f913cea4ae57a9dcec0c838333f43982d213f895ef3f3c6af3dab5cfecc9.scope has successfully entered the 'dead' state. Jul 07 20:31:23 managed-node1 conmon[20171]: conmon c378f913cea4ae57a9dc : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/libpod-c378f913cea4ae57a9dcec0c838333f43982d213f895ef3f3c6af3dab5cfecc9.scope/container/memory.events Jul 07 20:31:23 managed-node1 podman[36183]: 2025-07-07 20:31:23.259097255 -0400 EDT m=+0.285995664 container died c378f913cea4ae57a9dcec0c838333f43982d213f895ef3f3c6af3dab5cfecc9 (image=, name=3ef6fcac6278-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:23 managed-node1 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-c378f913cea4ae57a9dcec0c838333f43982d213f895ef3f3c6af3dab5cfecc9-rootfs-merge.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-c378f913cea4ae57a9dcec0c838333f43982d213f895ef3f3c6af3dab5cfecc9-rootfs-merge.mount has successfully entered the 'dead' state.
Jul 07 20:31:23 managed-node1 podman[36183]: 2025-07-07 20:31:23.3241034 -0400 EDT m=+0.351001711 container remove c378f913cea4ae57a9dcec0c838333f43982d213f895ef3f3c6af3dab5cfecc9 (image=, name=3ef6fcac6278-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:31:23 managed-node1 podman[36183]: Pods stopped: Jul 07 20:31:23 managed-node1 podman[36183]: 8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd Jul 07 20:31:23 managed-node1 podman[36183]: Pods removed: Jul 07 20:31:23 managed-node1 podman[36183]: 8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd Jul 07 20:31:23 managed-node1 podman[36183]: Secrets removed: Jul 07 20:31:23 managed-node1 podman[36183]: Volumes removed: Jul 07 20:31:23 managed-node1 systemd[1]: podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service has successfully entered the 'dead' state. Jul 07 20:31:23 managed-node1 systemd[1]: Stopped podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service - A template for running K8s workloads via podman-kube-play. ░░ Subject: A stop job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service has finished. ░░ ░░ The job identifier is 5958 and the job result is done. Jul 07 20:31:23 managed-node1 systemd[1]: podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service: Consumed 33.557s CPU time, 37M memory peak. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service completed and consumed the indicated resources. Jul 07 20:31:23 managed-node1 python3.12[36402]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:31:24 managed-node1 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-c378f913cea4ae57a9dcec0c838333f43982d213f895ef3f3c6af3dab5cfecc9-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-c378f913cea4ae57a9dcec0c838333f43982d213f895ef3f3c6af3dab5cfecc9-userdata-shm.mount has successfully entered the 'dead' state.
Jul 07 20:31:24 managed-node1 python3.12[36559]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 07 20:31:24 managed-node1 python3.12[36559]: ansible-containers.podman.podman_play version: 5.5.1, kube file /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml Jul 07 20:31:24 managed-node1 python3.12[36727]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:31:25 managed-node1 python3.12[36882]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:31:27 managed-node1 python3.12[37039]: ansible-systemd Invoked with name=auth_test_1_quadlet.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Jul 07 20:31:27 managed-node1 systemd[1]: Reload requested from client PID 37042 ('systemctl') (unit session-5.scope)... Jul 07 20:31:27 managed-node1 systemd[1]: Reloading... Jul 07 20:31:27 managed-node1 systemd-rc-local-generator[37081]: /etc/rc.d/rc.local is not marked executable, skipping. Jul 07 20:31:27 managed-node1 systemd[1]: Reloading finished in 193 ms. Jul 07 20:31:27 managed-node1 python3.12[37252]: ansible-stat Invoked with path=/etc/containers/systemd/auth_test_1_quadlet.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:31:28 managed-node1 python3.12[37564]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:31:30 managed-node1 python3.12[37875]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:31:31 managed-node1 python3.12[38036]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:31:33 managed-node1 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.
Jul 07 20:31:34 managed-node1 python3.12[38194]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:31:35 managed-node1 python3.12[38351]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:31:37 managed-node1 python3.12[38508]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:31:38 managed-node1 python3.12[38665]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:31:38 managed-node1 python3.12[38821]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None Jul 07 20:31:39 managed-node1 python3.12[38978]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:31:39 managed-node1 python3.12[39133]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:31:40 managed-node1 python3.12[39288]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:31:41 managed-node1 python3.12[39445]: ansible-systemd Invoked with name=auth_test_1_quadlet.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Jul 07 20:31:41 managed-node1 systemd[1]: Reload requested from client PID 39448 ('systemctl') (unit session-5.scope)... Jul 07 20:31:41 managed-node1 systemd[1]: Reloading... Jul 07 20:31:42 managed-node1 systemd-rc-local-generator[39493]: /etc/rc.d/rc.local is not marked executable, skipping. Jul 07 20:31:42 managed-node1 systemd[1]: Reloading finished in 192 ms. 
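The escaped instance name that appears throughout this journal, podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, is produced by the systemd-escape --template call logged above. A minimal Python sketch of that escaping for illustration only (systemd_escape is a hypothetical helper, not part of the role; the real tool is systemd-escape):

    def systemd_escape(s: str) -> str:
        # Rough approximation of `systemd-escape` without --path:
        # '/' maps to '-', and anything outside [A-Za-z0-9_.] is
        # emitted as a \xNN escape, which is why '-' shows up as \x2d.
        out = []
        for ch in s:
            if ch == "/":
                out.append("-")
            elif ch.isalnum() or ch in "_.":
                out.append(ch)
            else:
                out.append("\\x%02x" % ord(ch))
        return "".join(out)

    path = "/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml"
    print("podman-kube@" + systemd_escape(path) + ".service")
    # -> podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service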
Jul 07 20:31:42 managed-node1 python3.12[39657]: ansible-stat Invoked with path=/etc/containers/systemd/auth_test_1_quadlet.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:31:43 managed-node1 python3.12[39969]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:31:45 managed-node1 python3.12[40280]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:31:46 managed-node1 python3.12[40441]: ansible-getent Invoked with database=passwd key=auth_test_user1 fail_key=False service=None split=None Jul 07 20:31:46 managed-node1 python3.12[40597]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:31:47 managed-node1 python3.12[40753]: ansible-user Invoked with name=auth_test_user1 state=absent non_unique=False force=False remove=False create_home=True system=False move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node1 update_password=always uid=None group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None Jul 07 20:31:47 managed-node1 python3.12[40909]: ansible-file Invoked with path=/home/auth_test_user1 state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:31:48 managed-node1 python3.12[41064]: ansible-ansible.legacy.command Invoked with _raw_params=podman inspect podman_registry --format '{{range .}}{{range .Mounts}}{{if eq .Type "volume"}}{{.Name}}{{end}}{{end}}{{end}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:31:48 managed-node1 python3.12[41227]: ansible-ansible.legacy.command Invoked with _raw_params=podman rm -f podman_registry _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:31:48 managed-node1 systemd[1]: libpod-cd37f7eebf3f1a0d1061dbea9630cc5859593ef5dfdb627a7bec92189b0c5ff0.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-cd37f7eebf3f1a0d1061dbea9630cc5859593ef5dfdb627a7bec92189b0c5ff0.scope has successfully entered the 'dead' state.
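The podman inspect --format Go template logged just above pulls the names of the registry container's volume-backed mounts so the matching volume can be removed afterwards with podman volume rm. An equivalent lookup as a minimal Python sketch (volume_names is a hypothetical helper introduced here; assumes podman is on PATH):

    import json
    import subprocess

    def volume_names(container: str) -> list[str]:
        # Same query as the Go template: the Name of every mount whose
        # Type is "volume" in the `podman inspect` JSON output.
        raw = subprocess.run(
            ["podman", "inspect", container],
            check=True, capture_output=True, text=True,
        ).stdout
        return [
            m["Name"]
            for entry in json.loads(raw)
            for m in entry.get("Mounts", [])
            if m.get("Type") == "volume"
        ]

    # usage: volume_names("podman_registry")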
Jul 07 20:31:48 managed-node1 podman[41228]: 2025-07-07 20:31:48.594627818 -0400 EDT m=+0.040810779 container died cd37f7eebf3f1a0d1061dbea9630cc5859593ef5dfdb627a7bec92189b0c5ff0 (image=quay.io/libpod/registry:2.8.2, name=podman_registry) Jul 07 20:31:48 managed-node1 kernel: podman0: port 1(veth0) entered disabled state Jul 07 20:31:48 managed-node1 kernel: veth0 (unregistering): left allmulticast mode Jul 07 20:31:48 managed-node1 kernel: veth0 (unregistering): left promiscuous mode Jul 07 20:31:48 managed-node1 kernel: podman0: port 1(veth0) entered disabled state Jul 07 20:31:48 managed-node1 NetworkManager[716]: [1751934708.6321] device (podman0): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Jul 07 20:31:48 managed-node1 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 6046. Jul 07 20:31:48 managed-node1 systemd[1]: run-netns-netns\x2d54766868\x2d56df\x2d359a\x2d3bba\x2d291deccab973.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2d54766868\x2d56df\x2d359a\x2d3bba\x2d291deccab973.mount has successfully entered the 'dead' state. Jul 07 20:31:48 managed-node1 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 6046. Jul 07 20:31:48 managed-node1 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-cd37f7eebf3f1a0d1061dbea9630cc5859593ef5dfdb627a7bec92189b0c5ff0-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-cd37f7eebf3f1a0d1061dbea9630cc5859593ef5dfdb627a7bec92189b0c5ff0-userdata-shm.mount has successfully entered the 'dead' state. Jul 07 20:31:48 managed-node1 systemd[1]: var-lib-containers-storage-overlay-90a43350bb4f6c8f9132c8d0276de4bda61dad2186899fe1f5ee6b12d58cb721-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-90a43350bb4f6c8f9132c8d0276de4bda61dad2186899fe1f5ee6b12d58cb721-merged.mount has successfully entered the 'dead' state. Jul 07 20:31:48 managed-node1 podman[41228]: 2025-07-07 20:31:48.718671537 -0400 EDT m=+0.164854466 container remove cd37f7eebf3f1a0d1061dbea9630cc5859593ef5dfdb627a7bec92189b0c5ff0 (image=quay.io/libpod/registry:2.8.2, name=podman_registry) Jul 07 20:31:48 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jul 07 20:31:48 managed-node1 systemd[1]: libpod-conmon-cd37f7eebf3f1a0d1061dbea9630cc5859593ef5dfdb627a7bec92189b0c5ff0.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-cd37f7eebf3f1a0d1061dbea9630cc5859593ef5dfdb627a7bec92189b0c5ff0.scope has successfully entered the 'dead' state. Jul 07 20:31:49 managed-node1 python3.12[41421]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume rm 4f496486c0bcf1b762b3273588256ef269fddf531cc488eaf050a62e6391a121 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:31:49 managed-node1 podman[41422]: 2025-07-07 20:31:49.188296836 -0400 EDT m=+0.024296713 volume remove 4f496486c0bcf1b762b3273588256ef269fddf531cc488eaf050a62e6391a121 Jul 07 20:31:49 managed-node1 python3.12[41584]: ansible-file Invoked with path=/tmp/lsr_urjcw20a_podman state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:31:51 managed-node1 python3.12[41790]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Jul 07 20:31:52 managed-node1 python3.12[41974]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:31:53 managed-node1 python3.12[42129]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:31:55 managed-node1 python3.12[42439]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:31:55 managed-node1 python3.12[42600]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 07 20:31:56 managed-node1 python3.12[42756]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:31:58 managed-node1 python3.12[42914]: ansible-tempfile Invoked with state=directory prefix=lsr_podman_config_ suffix= path=None Jul 07 20:31:58 managed-node1 python3.12[43069]: ansible-ansible.legacy.command Invoked with _raw_params=tar --ignore-failed-read -c -P -v -p -f /tmp/lsr_podman_config_acix87c_/backup.tar /etc/containers/containers.conf.d/50-systemroles.conf /etc/containers/registries.conf.d/50-systemroles.conf /etc/containers/storage.conf /etc/containers/policy.json _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:31:58 managed-node1 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.
Jul 07 20:31:59 managed-node1 python3.12[43226]: ansible-user Invoked with name=user1 state=present non_unique=False force=False remove=False create_home=True system=False move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node1 update_password=always uid=None group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None Jul 07 20:31:59 managed-node1 useradd[43228]: new group: name=user1, GID=1000 Jul 07 20:31:59 managed-node1 useradd[43228]: new user: name=user1, UID=1000, GID=1000, home=/home/user1, shell=/bin/bash, from=/dev/pts/0 Jul 07 20:32:01 managed-node1 python3.12[43538]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:32:01 managed-node1 python3.12[43700]: ansible-getent Invoked with database=passwd key=user1 fail_key=False service=None split=None Jul 07 20:32:02 managed-node1 python3.12[43857]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:32:02 managed-node1 python3.12[44014]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:32:03 managed-node1 python3.12[44170]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:32:04 managed-node1 python3.12[44326]: ansible-file Invoked with path=/home/user1/.config/containers/containers.conf.d state=directory owner=user1 group=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:32:04 managed-node1 python3.12[44481]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:32:04 managed-node1 python3.12[44606]: ansible-ansible.legacy.copy Invoked with dest=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf owner=user1 group=user1 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751934724.2204754-13137-245227831768024/.source.conf _original_basename=.90zqvsbs follow=False checksum=b1776092f2908d76e11fd6af87267469b2c17d5a backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:32:05 managed-node1 python3.12[44761]: ansible-file Invoked with 
path=/home/user1/.config/containers/registries.conf.d state=directory owner=user1 group=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:32:05 managed-node1 python3.12[44916]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:32:06 managed-node1 python3.12[45041]: ansible-ansible.legacy.copy Invoked with dest=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf owner=user1 group=user1 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751934725.5107205-13166-107729205740759/.source.conf _original_basename=.ceqnuqdb follow=False checksum=fde25488ce7040f1639af7bfc88ed125318cc0b0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:32:06 managed-node1 python3.12[45196]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:32:06 managed-node1 python3.12[45351]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/storage.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:32:07 managed-node1 python3.12[45476]: ansible-ansible.legacy.copy Invoked with dest=/home/user1/.config/containers/storage.conf owner=user1 group=user1 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751934726.699786-13195-91714043369387/.source.conf _original_basename=.31lup0x9 follow=False checksum=38f015f4780579bd388dd955b42916199fd7fe19 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:32:07 managed-node1 python3.12[45631]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:32:08 managed-node1 python3.12[45786]: ansible-stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:32:08 managed-node1 python3.12[45941]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:32:08 managed-node1 python3.12[46066]: ansible-ansible.legacy.copy Invoked with dest=/home/user1/.config/containers/policy.json owner=user1 group=user1 
mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751934728.243613-13238-43275417211631/.source.json _original_basename=.5gmxjq7z follow=False checksum=6746c079ad563b735fc39f73d4876654b80b0a0d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:32:09 managed-node1 python3.12[46221]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:32:10 managed-node1 python3.12[46378]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:32:10 managed-node1 python3.12[46534]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:32:11 managed-node1 python3.12[46690]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:32:12 managed-node1 python3.12[47125]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:32:13 managed-node1 python3.12[47282]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:32:13 managed-node1 python3.12[47438]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:32:14 managed-node1 python3.12[47594]: ansible-stat Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:32:15 managed-node1 python3.12[47751]: ansible-stat Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:32:15 managed-node1 python3.12[47908]: ansible-stat Invoked with path=/home/user1/.config/containers/storage.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:32:16 managed-node1 python3.12[48065]: ansible-stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:32:16 managed-node1 python3.12[48222]: ansible-ansible.legacy.command Invoked with _raw_params=grep 'container_name_as_hostname[ ]*=[ ]*true' /home/user1/.config/containers/containers.conf.d/50-systemroles.conf _uses_shell=False expand_argument_vars=True stdin_add_newline=True 
strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:32:18 managed-node1 python3.12[48533]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:32:19 managed-node1 python3.12[48694]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:32:19 managed-node1 python3.12[48851]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:32:20 managed-node1 python3.12[49007]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:32:20 managed-node1 python3.12[49163]: ansible-file Invoked with path=/home/user1/.config/containers/containers.conf.d state=directory owner=user1 group=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:32:21 managed-node1 python3.12[49318]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:32:21 managed-node1 python3.12[49396]: ansible-ansible.legacy.file Invoked with owner=user1 group=user1 mode=0644 dest=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf _original_basename=.mja6pw9a recurse=False state=file path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:32:21 managed-node1 python3.12[49551]: ansible-file Invoked with path=/home/user1/.config/containers/registries.conf.d state=directory owner=user1 group=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:32:22 managed-node1 python3.12[49706]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:32:22 managed-node1 python3.12[49784]: ansible-ansible.legacy.file Invoked with owner=user1 group=user1 mode=0644 dest=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf _original_basename=.j2jbew9d recurse=False state=file path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf force=False follow=True 
modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:32:23 managed-node1 python3.12[49939]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:32:23 managed-node1 python3.12[50095]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/storage.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:32:23 managed-node1 python3.12[50173]: ansible-ansible.legacy.file Invoked with owner=user1 group=user1 mode=0644 dest=/home/user1/.config/containers/storage.conf _original_basename=.z_096_sp recurse=False state=file path=/home/user1/.config/containers/storage.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:32:24 managed-node1 python3.12[50328]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:32:24 managed-node1 python3.12[50483]: ansible-stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:32:25 managed-node1 python3.12[50640]: ansible-slurp Invoked with path=/home/user1/.config/containers/policy.json src=/home/user1/.config/containers/policy.json Jul 07 20:32:25 managed-node1 python3.12[50795]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:32:26 managed-node1 python3.12[50953]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:32:26 managed-node1 python3.12[51109]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:32:27 managed-node1 python3.12[51266]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:32:28 managed-node1 python3.12[51654]: ansible-stat Invoked with 
path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:32:29 managed-node1 python3.12[51811]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:32:29 managed-node1 python3.12[51967]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:32:30 managed-node1 python3.12[52123]: ansible-stat Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:32:30 managed-node1 python3.12[52280]: ansible-stat Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:32:31 managed-node1 python3.12[52437]: ansible-stat Invoked with path=/home/user1/.config/containers/storage.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:32:31 managed-node1 python3.12[52594]: ansible-stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:32:33 managed-node1 python3.12[52906]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:32:34 managed-node1 python3.12[53067]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 07 20:32:34 managed-node1 python3.12[53223]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:32:35 managed-node1 python3.12[53380]: ansible-file Invoked with path=/etc/containers/containers.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:32:36 managed-node1 python3.12[53535]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:32:36 managed-node1 python3.12[53660]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/containers.conf.d/50-systemroles.conf owner=root mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751934756.0927734-14109-6617328344651/.source.conf _original_basename=.e_8i3w3o follow=False checksum=b1776092f2908d76e11fd6af87267469b2c17d5a backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:32:37 managed-node1 python3.12[53815]: ansible-file Invoked with 
path=/etc/containers/registries.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:32:37 managed-node1 python3.12[53970]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:32:37 managed-node1 python3.12[54095]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/registries.conf.d/50-systemroles.conf owner=root mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751934757.322634-14160-134327254316543/.source.conf _original_basename=.yy1ryvyi follow=False checksum=fde25488ce7040f1639af7bfc88ed125318cc0b0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:32:38 managed-node1 python3.12[54250]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:32:38 managed-node1 python3.12[54405]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:32:39 managed-node1 python3.12[54530]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/storage.conf owner=root mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751934758.6517808-14205-147389732057554/.source.conf _original_basename=.q90e1n48 follow=False checksum=38f015f4780579bd388dd955b42916199fd7fe19 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:32:39 managed-node1 python3.12[54685]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:32:40 managed-node1 python3.12[54840]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:32:40 managed-node1 python3.12[54997]: ansible-slurp Invoked with path=/etc/containers/policy.json src=/etc/containers/policy.json Jul 07 20:32:41 managed-node1 python3.12[55152]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:32:41 managed-node1 python3.12[55279]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/policy.json owner=root mode=0644 
src=/root/.ansible/tmp/ansible-tmp-1751934760.7385497-14276-15721655439210/.source.json _original_basename=.355w7cpi follow=False checksum=6746c079ad563b735fc39f73d4876654b80b0a0d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:32:42 managed-node1 python3.12[55434]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:32:43 managed-node1 python3.12[55591]: ansible-file Invoked with path=/root/.config/containers state=directory owner=root group=0 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:32:45 managed-node1 python3.12[56028]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:32:46 managed-node1 python3.12[56185]: ansible-stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:32:47 managed-node1 python3.12[56342]: ansible-stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:32:47 managed-node1 python3.12[56499]: ansible-stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:32:47 managed-node1 python3.12[56656]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:32:50 managed-node1 python3.12[56968]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:32:51 managed-node1 python3.12[57129]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:32:52 managed-node1 python3.12[57286]: ansible-file Invoked with path=/etc/containers/containers.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:32:52 managed-node1 python3.12[57441]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:32:52 managed-node1 python3.12[57519]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/containers.conf.d/50-systemroles.conf _original_basename=.4sbcez6l recurse=False state=file path=/etc/containers/containers.conf.d/50-systemroles.conf force=False follow=True 
modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:32:53 managed-node1 python3.12[57674]: ansible-file Invoked with path=/etc/containers/registries.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:32:53 managed-node1 python3.12[57829]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:32:54 managed-node1 python3.12[57907]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/registries.conf.d/50-systemroles.conf _original_basename=.z1r_2_b3 recurse=False state=file path=/etc/containers/registries.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:32:54 managed-node1 python3.12[58062]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:32:54 managed-node1 python3.12[58217]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:32:55 managed-node1 python3.12[58295]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/storage.conf _original_basename=.e7w1bw_a recurse=False state=file path=/etc/containers/storage.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:32:55 managed-node1 python3.12[58450]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:32:56 managed-node1 python3.12[58605]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:32:56 managed-node1 python3.12[58762]: ansible-slurp Invoked with path=/etc/containers/policy.json src=/etc/containers/policy.json Jul 07 20:32:57 managed-node1 python3.12[58917]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True 
checksum_algorithm=sha1 Jul 07 20:32:58 managed-node1 python3.12[59074]: ansible-file Invoked with path=/root/.config/containers state=directory owner=root group=0 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:33:00 managed-node1 python3.12[59462]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:33:01 managed-node1 python3.12[59619]: ansible-stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:33:01 managed-node1 python3.12[59776]: ansible-stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:33:02 managed-node1 python3.12[59933]: ansible-stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:33:02 managed-node1 python3.12[60090]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:33:03 managed-node1 python3.12[60247]: ansible-slurp Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf src=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf Jul 07 20:33:03 managed-node1 python3.12[60402]: ansible-slurp Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf src=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf Jul 07 20:33:04 managed-node1 python3.12[60557]: ansible-slurp Invoked with path=/home/user1/.config/containers/storage.conf src=/home/user1/.config/containers/storage.conf Jul 07 20:33:04 managed-node1 python3.12[60712]: ansible-slurp Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf src=/etc/containers/containers.conf.d/50-systemroles.conf Jul 07 20:33:05 managed-node1 python3.12[60867]: ansible-slurp Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf src=/etc/containers/registries.conf.d/50-systemroles.conf Jul 07 20:33:05 managed-node1 python3.12[61022]: ansible-slurp Invoked with path=/etc/containers/storage.conf src=/etc/containers/storage.conf Jul 07 20:33:08 managed-node1 python3.12[61332]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:33:09 managed-node1 python3.12[61493]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:33:10 managed-node1 python3.12[61650]: ansible-file Invoked with path=/etc/containers/containers.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None 
Jul 07 20:33:10 managed-node1 python3.12[61805]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:33:11 managed-node1 python3.12[61932]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/containers.conf.d/50-systemroles.conf owner=root mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751934790.4974134-15303-224512391964356/.source.conf _original_basename=.p1xuaqrk follow=False checksum=9694c1d1c700a6435eecf4066b052584f4ee94c0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:33:11 managed-node1 python3.12[62087]: ansible-file Invoked with path=/etc/containers/registries.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:33:11 managed-node1 python3.12[62243]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:33:12 managed-node1 python3.12[62321]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/registries.conf.d/50-systemroles.conf _original_basename=.6uhx9rju recurse=False state=file path=/etc/containers/registries.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:33:12 managed-node1 python3.12[62476]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:33:13 managed-node1 python3.12[62631]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:33:13 managed-node1 python3.12[62709]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/storage.conf _original_basename=.n46t0ec7 recurse=False state=file path=/etc/containers/storage.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:33:13 managed-node1 python3.12[62864]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None 
access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:33:14 managed-node1 python3.12[63019]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:33:14 managed-node1 python3.12[63176]: ansible-slurp Invoked with path=/etc/containers/policy.json src=/etc/containers/policy.json Jul 07 20:33:15 managed-node1 python3.12[63331]: ansible-slurp Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf src=/etc/containers/containers.conf.d/50-systemroles.conf Jul 07 20:33:16 managed-node1 python3.12[63487]: ansible-file Invoked with state=absent path=/etc/containers/containers.conf.d/50-systemroles.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:33:16 managed-node1 python3.12[63642]: ansible-file Invoked with state=absent path=/etc/containers/registries.conf.d/50-systemroles.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:33:17 managed-node1 python3.12[63797]: ansible-file Invoked with state=absent path=/etc/containers/storage.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:33:17 managed-node1 python3.12[63952]: ansible-file Invoked with state=absent path=/etc/containers/policy.json recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:33:17 managed-node1 python3.12[64108]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:33:18 managed-node1 python3.12[64263]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:33:18 managed-node1 python3.12[64418]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/storage.conf recurse=False force=False 
follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:33:18 managed-node1 python3.12[64573]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/policy.json recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:33:19 managed-node1 python3.12[64728]: ansible-file Invoked with state=absent path=/root/.config/containers/auth.json recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:33:19 managed-node1 python3.12[64883]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/auth.json recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:33:19 managed-node1 python3.12[65038]: ansible-ansible.legacy.command Invoked with _raw_params=tar xfvpP /tmp/lsr_podman_config_acix87c_/backup.tar _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:33:20 managed-node1 python3.12[65194]: ansible-file Invoked with state=absent path=/tmp/lsr_podman_config_acix87c_ recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:33:22 managed-node1 python3.12[65400]: ansible-setup Invoked with gather_subset=['!all', '!min', 'distribution', 'distribution_major_version', 'distribution_version', 'os_family'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Jul 07 20:33:22 managed-node1 python3.12[65557]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:33:22 managed-node1 python3.12[65712]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:33:24 managed-node1 python3.12[66022]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:33:25 managed-node1 python3.12[66184]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 07 20:33:26 managed-node1 python3.12[66340]: ansible-stat Invoked with 
path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:33:29 managed-node1 python3.12[66548]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Jul 07 20:33:32 managed-node1 python3.12[66732]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:33:33 managed-node1 python3.12[66887]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:33:35 managed-node1 python3.12[67197]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:33:35 managed-node1 python3.12[67359]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 07 20:33:36 managed-node1 python3.12[67515]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:33:40 managed-node1 python3.12[67723]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Jul 07 20:33:41 managed-node1 python3.12[67907]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:33:41 managed-node1 python3.12[68062]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:33:43 managed-node1 python3.12[68372]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:33:44 managed-node1 python3.12[68533]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 07 20:33:44 managed-node1 python3.12[68689]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:33:46 managed-node1 python3.12[68846]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:33:48 managed-node1 python3.12[69003]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:33:48 managed-node1 python3.12[69158]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/nopull.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:33:49 managed-node1 python3.12[69283]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1751934828.656423-17050-224685263014320/.source.container dest=/etc/containers/systemd/nopull.container owner=root 
group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=670d64fc68a9768edb20cad26df2acc703542d85 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:33:51 managed-node1 python3.12[69593]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:33:52 managed-node1 python3.12[69754]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:33:54 managed-node1 python3.12[69911]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:33:55 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 07 20:33:55 managed-node1 podman[70077]: 2025-07-07 20:33:55.866838654 -0400 EDT m=+0.019750887 image pull-error this_is_a_bogus_image:latest short-name resolution enforced but cannot prompt without a TTY Jul 07 20:33:55 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jul 07 20:33:56 managed-node1 python3.12[70238]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:33:56 managed-node1 python3.12[70393]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/bogus.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:33:57 managed-node1 python3.12[70518]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1751934836.4283967-17279-9425111643274/.source.container dest=/etc/containers/systemd/bogus.container owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=1d087e679d135214e8ac9ccaf33b2222916efb7f backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:33:59 managed-node1 python3.12[70828]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:34:00 managed-node1 python3.12[70990]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:34:02 managed-node1 python3.12[71147]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:34:03 managed-node1 python3.12[71304]: ansible-systemd Invoked with name=nopull.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Jul 07 20:34:04 managed-node1 python3.12[71460]: ansible-stat Invoked with path=/etc/containers/systemd/nopull.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:34:05 managed-node1 python3.12[71772]: ansible-ansible.legacy.command Invoked with _raw_params=set -x set -o pipefail exec 1>&2 #podman volume rm --all #podman network prune -f podman volume ls podman network ls podman secret ls podman container ls podman pod ls podman images systemctl list-units | grep quadlet _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:34:05 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 07 20:34:05 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jul 07 20:34:06 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 07 20:34:07 managed-node1 python3.12[72130]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:34:08 managed-node1 python3.12[72291]: ansible-getent Invoked with database=passwd key=user_quadlet_basic fail_key=False service=None split=None Jul 07 20:34:08 managed-node1 python3.12[72447]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None PLAY RECAP ********************************************************************* managed-node1 : ok=110 changed=2 unreachable=0 failed=2 skipped=190 rescued=2 ignored=0 SYSTEM ROLES ERRORS BEGIN v1 [ { "ansible_version": "2.17.12", "end_time": "2025-07-08T00:34:04.784422+00:00Z", "host": "managed-node1", "message": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "start_time": "2025-07-08T00:34:04.765375+00:00Z", "task_name": "Parse quadlet file", "task_path": "/tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12" }, { "ansible_version": "2.17.12", "delta": "0:00:00.162374", "end_time": "2025-07-07 20:34:05.266728", "host": "managed-node1", "message": "non-zero return code", "rc": 1, "start_time": "2025-07-07 20:34:05.104354", "stderr": "+ set -o pipefail\n+ exec\n+ podman volume ls\nDRIVER VOLUME NAME\n+ podman network ls\nNETWORK ID NAME DRIVER\n2f259bab93aa podman bridge\n4adc89c3d61f podman-default-kube-network bridge\n+ podman secret ls\nID NAME DRIVER CREATED UPDATED\n+ podman container ls\nCONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n+ podman pod ls\nPOD ID NAME STATUS CREATED INFRA ID # OF CONTAINERS\n+ podman images\nREPOSITORY TAG IMAGE ID CREATED SIZE\nquay.io/libpod/registry 2.8.2 0030ba3d620c 23 months ago 24.6 MB\nlocalhost:5000/libpod/testimage 20210610 9f9ec7f2fdef 4 years ago 7.99 MB\nquay.io/libpod/testimage 20210610 9f9ec7f2fdef 4 years ago 7.99 MB\n+ systemctl list-units\n+ grep quadlet", "task_name": "Debug3", "task_path": "/tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:270" }, { "ansible_version": "2.17.12", "end_time": "2025-07-08T00:34:08.344168+00:00Z", "host": "managed-node1", "message": "The given podman user [user_quadlet_basic] does not exist - cannot continue\n", "start_time": "2025-07-08T00:34:08.323295+00:00Z", "task_name": "Fail if user does not exist", "task_path": "/tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9" }, { "ansible_version": "2.17.12", "delta": "0:00:00.037435", "end_time": "2025-07-07 20:34:08.697461", "host": "managed-node1", "message": "", "rc": 0, "start_time": "2025-07-07 20:34:08.660026", "stdout": "Jul 07 20:31:12 managed-node1 podman[33529]: 2025-07-07 20:31:12.683428814 -0400 EDT m=+0.105333506 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e
(image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:12 managed-node1 auth_test_1_kube-auth_test_1_kube[33562]: This container is intended for podman CI testing\nJul 07 20:31:12 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:12 managed-node1 podman[33570]: 2025-07-07 20:31:12.714820753 -0400 EDT m=+0.020442335 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:12 managed-node1 podman[33570]: 2025-07-07 20:31:12.727739741 -0400 EDT m=+0.033361322 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:12 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5293.\nJul 07 20:31:12 managed-node1 podman[33570]: 2025-07-07 20:31:12.772309131 -0400 EDT m=+0.077930730 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:12 managed-node1 podman[33570]: 2025-07-07 20:31:12.774539798 -0400 EDT m=+0.080161434 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, 
pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:12 managed-node1 auth_test_1_kube-auth_test_1_kube[33581]: This container is intended for podman CI testing\nJul 07 20:31:12 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:12 managed-node1 conmon[33581]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events\nJul 07 20:31:12 managed-node1 podman[33585]: 2025-07-07 20:31:12.80302532 -0400 EDT m=+0.019217516 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:12 managed-node1 podman[33585]: 2025-07-07 20:31:12.815878318 -0400 EDT m=+0.032070420 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:12 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5300.\nJul 07 20:31:12 managed-node1 podman[33585]: 2025-07-07 20:31:12.858892733 -0400 EDT m=+0.075084844 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:12 managed-node1 podman[33585]: 
2025-07-07 20:31:12.861608858 -0400 EDT m=+0.077801000 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:12 managed-node1 auth_test_1_kube-auth_test_1_kube[33597]: This container is intended for podman CI testing\nJul 07 20:31:12 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:12 managed-node1 conmon[33597]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events\nJul 07 20:31:12 managed-node1 podman[33601]: 2025-07-07 20:31:12.894645384 -0400 EDT m=+0.019661678 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:12 managed-node1 podman[33601]: 2025-07-07 20:31:12.907221489 -0400 EDT m=+0.032237740 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:12 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5307.\nJul 07 20:31:12 managed-node1 podman[33601]: 2025-07-07 20:31:12.964011098 -0400 EDT m=+0.089027366 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, 
created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:12 managed-node1 podman[33601]: 2025-07-07 20:31:12.966891903 -0400 EDT m=+0.091908222 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:12 managed-node1 auth_test_1_kube-auth_test_1_kube[33612]: This container is intended for podman CI testing\nJul 07 20:31:12 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:12 managed-node1 podman[33616]: 2025-07-07 20:31:12.998349231 -0400 EDT m=+0.019035510 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:13 managed-node1 podman[33616]: 2025-07-07 20:31:13.011136143 -0400 EDT m=+0.031822176 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:13 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5314.\nJul 07 20:31:13 managed-node1 podman[33616]: 2025-07-07 20:31:13.061983984 -0400 EDT m=+0.082670077 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, 
created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:13 managed-node1 podman[33616]: 2025-07-07 20:31:13.064744859 -0400 EDT m=+0.085430930 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:13 managed-node1 auth_test_1_kube-auth_test_1_kube[33628]: This container is intended for podman CI testing\nJul 07 20:31:13 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:13 managed-node1 podman[33656]: 2025-07-07 20:31:13.110348205 -0400 EDT m=+0.035549767 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:13 managed-node1 podman[33656]: 2025-07-07 20:31:13.123449504 -0400 EDT m=+0.048650883 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:13 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5321.\nJul 07 20:31:13 managed-node1 podman[33656]: 2025-07-07 20:31:13.172982151 -0400 EDT m=+0.098183549 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, 
PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:13 managed-node1 podman[33656]: 2025-07-07 20:31:13.175131488 -0400 EDT m=+0.100332897 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:13 managed-node1 auth_test_1_kube-auth_test_1_kube[33721]: This container is intended for podman CI testing\nJul 07 20:31:13 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:13 managed-node1 podman[33725]: 2025-07-07 20:31:13.219388663 -0400 EDT m=+0.030083614 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:13 managed-node1 podman[33725]: 2025-07-07 20:31:13.23314399 -0400 EDT m=+0.043839011 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:13 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5328.\nJul 07 20:31:13 managed-node1 podman[33725]: 2025-07-07 20:31:13.309022917 -0400 EDT m=+0.119718030 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, 
io.buildah.version=1.21.0)\nJul 07 20:31:13 managed-node1 auth_test_1_kube-auth_test_1_kube[33789]: This container is intended for podman CI testing\nJul 07 20:31:13 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:13 managed-node1 podman[33725]: 2025-07-07 20:31:13.315211739 -0400 EDT m=+0.125906753 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:13 managed-node1 conmon[33789]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events\nJul 07 20:31:13 managed-node1 podman[33793]: 2025-07-07 20:31:13.364116084 -0400 EDT m=+0.033666071 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:13 managed-node1 podman[33793]: 2025-07-07 20:31:13.379887466 -0400 EDT m=+0.049437393 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:13 managed-node1 python3.12[33786]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:31:13 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5335.\nJul 07 20:31:13 managed-node1 podman[33793]: 
2025-07-07 20:31:13.447336083 -0400 EDT m=+0.116886092 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:13 managed-node1 auth_test_1_kube-auth_test_1_kube[33805]: This container is intended for podman CI testing\nJul 07 20:31:13 managed-node1 podman[33793]: 2025-07-07 20:31:13.451322866 -0400 EDT m=+0.120872788 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:13 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:13 managed-node1 conmon[33805]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events\nJul 07 20:31:13 managed-node1 podman[33815]: 2025-07-07 20:31:13.494306645 -0400 EDT m=+0.027771538 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:13 managed-node1 podman[33815]: 2025-07-07 20:31:13.507856915 -0400 EDT m=+0.041321733 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:13 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A 
start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5342.\nJul 07 20:31:13 managed-node1 podman[33815]: 2025-07-07 20:31:13.565306542 -0400 EDT m=+0.098771304 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:13 managed-node1 podman[33815]: 2025-07-07 20:31:13.568114494 -0400 EDT m=+0.101579151 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:13 managed-node1 auth_test_1_kube-auth_test_1_kube[33854]: This container is intended for podman CI testing\nJul 07 20:31:13 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:13 managed-node1 podman[33858]: 2025-07-07 20:31:13.596989222 -0400 EDT m=+0.019144012 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:13 managed-node1 podman[33858]: 2025-07-07 20:31:13.609587102 -0400 EDT m=+0.031741858 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:13 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has 
finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5349.\nJul 07 20:31:13 managed-node1 podman[33858]: 2025-07-07 20:31:13.654974021 -0400 EDT m=+0.077128773 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:13 managed-node1 podman[33858]: 2025-07-07 20:31:13.657907701 -0400 EDT m=+0.080062528 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:13 managed-node1 auth_test_1_kube-auth_test_1_kube[33869]: This container is intended for podman CI testing\nJul 07 20:31:13 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:13 managed-node1 podman[33873]: 2025-07-07 20:31:13.68697132 -0400 EDT m=+0.020528212 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:13 managed-node1 podman[33873]: 2025-07-07 20:31:13.699661283 -0400 EDT m=+0.033218130 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:13 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5356.\nJul 07 20:31:13 
managed-node1 podman[33873]: 2025-07-07 20:31:13.747651843 -0400 EDT m=+0.081208736 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:13 managed-node1 podman[33873]: 2025-07-07 20:31:13.74987183 -0400 EDT m=+0.083428708 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:13 managed-node1 auth_test_1_kube-auth_test_1_kube[33884]: This container is intended for podman CI testing\nJul 07 20:31:13 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:13 managed-node1 conmon[33884]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events\nJul 07 20:31:13 managed-node1 podman[33888]: 2025-07-07 20:31:13.781993925 -0400 EDT m=+0.018849586 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:13 managed-node1 podman[33888]: 2025-07-07 20:31:13.79513907 -0400 EDT m=+0.031994709 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:13 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: 
https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5363.\nJul 07 20:31:13 managed-node1 podman[33888]: 2025-07-07 20:31:13.844893937 -0400 EDT m=+0.081749582 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:13 managed-node1 podman[33888]: 2025-07-07 20:31:13.847083733 -0400 EDT m=+0.083939386 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:13 managed-node1 auth_test_1_kube-auth_test_1_kube[33899]: This container is intended for podman CI testing\nJul 07 20:31:13 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:13 managed-node1 podman[33903]: 2025-07-07 20:31:13.879757907 -0400 EDT m=+0.019444443 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:13 managed-node1 podman[33903]: 2025-07-07 20:31:13.892569335 -0400 EDT m=+0.032255795 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:13 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit 
libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5370.\nJul 07 20:31:13 managed-node1 podman[33903]: 2025-07-07 20:31:13.941334631 -0400 EDT m=+0.081021075 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:13 managed-node1 podman[33903]: 2025-07-07 20:31:13.943528796 -0400 EDT m=+0.083215281 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:13 managed-node1 auth_test_1_kube-auth_test_1_kube[33915]: This container is intended for podman CI testing\nJul 07 20:31:13 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:13 managed-node1 podman[33919]: 2025-07-07 20:31:13.974589798 -0400 EDT m=+0.018767491 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:13 managed-node1 podman[33919]: 2025-07-07 20:31:13.98711363 -0400 EDT m=+0.031291332 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:14 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished 
successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5377.\nJul 07 20:31:14 managed-node1 podman[33919]: 2025-07-07 20:31:14.032832185 -0400 EDT m=+0.077009950 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:14 managed-node1 podman[33919]: 2025-07-07 20:31:14.035068935 -0400 EDT m=+0.079246661 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:14 managed-node1 auth_test_1_kube-auth_test_1_kube[33930]: This container is intended for podman CI testing\nJul 07 20:31:14 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:14 managed-node1 podman[33934]: 2025-07-07 20:31:14.066569543 -0400 EDT m=+0.019394285 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:14 managed-node1 podman[33934]: 2025-07-07 20:31:14.079769619 -0400 EDT m=+0.032594319 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:14 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5384.\nJul 07 20:31:14 
managed-node1 podman[33934]: 2025-07-07 20:31:14.12473653 -0400 EDT m=+0.077561363 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:14 managed-node1 podman[33934]: 2025-07-07 20:31:14.126932452 -0400 EDT m=+0.079757190 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:14 managed-node1 auth_test_1_kube-auth_test_1_kube[33945]: This container is intended for podman CI testing\nJul 07 20:31:14 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:14 managed-node1 podman[33949]: 2025-07-07 20:31:14.155224261 -0400 EDT m=+0.018922277 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:14 managed-node1 podman[33949]: 2025-07-07 20:31:14.168612894 -0400 EDT m=+0.032311030 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:14 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5391.\nJul 07 20:31:14 managed-node1 podman[33949]: 2025-07-07 20:31:14.215220959 -0400 EDT m=+0.078919011 container init 
88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:14 managed-node1 podman[33949]: 2025-07-07 20:31:14.218912531 -0400 EDT m=+0.082610550 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:14 managed-node1 auth_test_1_kube-auth_test_1_kube[33960]: This container is intended for podman CI testing\nJul 07 20:31:14 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:14 managed-node1 conmon[33960]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events\nJul 07 20:31:14 managed-node1 podman[33964]: 2025-07-07 20:31:14.265654687 -0400 EDT m=+0.032124857 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:14 managed-node1 podman[33964]: 2025-07-07 20:31:14.279861874 -0400 EDT m=+0.046332005 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:14 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit 
libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5398.\nJul 07 20:31:14 managed-node1 podman[33964]: 2025-07-07 20:31:14.334875201 -0400 EDT m=+0.101345567 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:14 managed-node1 auth_test_1_kube-auth_test_1_kube[34029]: This container is intended for podman CI testing\nJul 07 20:31:14 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:14 managed-node1 podman[33964]: 2025-07-07 20:31:14.34018797 -0400 EDT m=+0.106658397 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:14 managed-node1 conmon[34029]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events\nJul 07 20:31:14 managed-node1 podman[34055]: 2025-07-07 20:31:14.376429756 -0400 EDT m=+0.023451628 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:14 managed-node1 podman[34055]: 2025-07-07 20:31:14.395117482 -0400 EDT m=+0.042139287 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:14 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit 
libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5405.\nJul 07 20:31:14 managed-node1 podman[34055]: 2025-07-07 20:31:14.450271735 -0400 EDT m=+0.097293739 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:14 managed-node1 auth_test_1_kube-auth_test_1_kube[34110]: This container is intended for podman CI testing\nJul 07 20:31:14 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:14 managed-node1 podman[34055]: 2025-07-07 20:31:14.456393784 -0400 EDT m=+0.103415526 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:14 managed-node1 podman[34125]: 2025-07-07 20:31:14.501062804 -0400 EDT m=+0.032413735 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:14 managed-node1 podman[34125]: 2025-07-07 20:31:14.518461254 -0400 EDT m=+0.049812015 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:14 managed-node1 python3.12[34123]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:31:14 managed-node1 systemd[1]: Started 
libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5412.\nJul 07 20:31:14 managed-node1 podman[34125]: 2025-07-07 20:31:14.630447885 -0400 EDT m=+0.161798906 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:14 managed-node1 auth_test_1_kube-auth_test_1_kube[34136]: This container is intended for podman CI testing\nJul 07 20:31:14 managed-node1 podman[34125]: 2025-07-07 20:31:14.635762544 -0400 EDT m=+0.167113303 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:14 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:14 managed-node1 conmon[34136]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events\nJul 07 20:31:14 managed-node1 podman[34142]: 2025-07-07 20:31:14.679832358 -0400 EDT m=+0.033244283 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:14 managed-node1 podman[34142]: 2025-07-07 20:31:14.692438145 -0400 EDT m=+0.045850011 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, 
PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:14 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5419.\nJul 07 20:31:14 managed-node1 podman[34142]: 2025-07-07 20:31:14.74450265 -0400 EDT m=+0.097914575 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:14 managed-node1 auth_test_1_kube-auth_test_1_kube[34178]: This container is intended for podman CI testing\nJul 07 20:31:14 managed-node1 podman[34142]: 2025-07-07 20:31:14.746958703 -0400 EDT m=+0.100370582 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:14 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:14 managed-node1 podman[34182]: 2025-07-07 20:31:14.776198354 -0400 EDT m=+0.019494985 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:14 managed-node1 podman[34182]: 2025-07-07 20:31:14.788756224 -0400 EDT m=+0.032052812 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, 
PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:14 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5426.\nJul 07 20:31:14 managed-node1 podman[34182]: 2025-07-07 20:31:14.836923153 -0400 EDT m=+0.080219738 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:14 managed-node1 podman[34182]: 2025-07-07 20:31:14.839183071 -0400 EDT m=+0.082479859 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:14 managed-node1 auth_test_1_kube-auth_test_1_kube[34193]: This container is intended for podman CI testing\nJul 07 20:31:14 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:14 managed-node1 podman[34197]: 2025-07-07 20:31:14.870828401 -0400 EDT m=+0.019244246 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:14 managed-node1 podman[34197]: 2025-07-07 20:31:14.883452864 -0400 EDT m=+0.031868683 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, 
io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:14 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5433.\nJul 07 20:31:14 managed-node1 podman[34197]: 2025-07-07 20:31:14.93358477 -0400 EDT m=+0.082000607 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:14 managed-node1 podman[34197]: 2025-07-07 20:31:14.935834195 -0400 EDT m=+0.084250078 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:14 managed-node1 auth_test_1_kube-auth_test_1_kube[34208]: This container is intended for podman CI testing\nJul 07 20:31:14 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:14 managed-node1 podman[34212]: 2025-07-07 20:31:14.965314012 -0400 EDT m=+0.020484512 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:14 managed-node1 podman[34212]: 2025-07-07 20:31:14.977884857 -0400 EDT m=+0.033055350 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, 
created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:15 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5440.\nJul 07 20:31:15 managed-node1 podman[34212]: 2025-07-07 20:31:15.026957421 -0400 EDT m=+0.082127960 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:15 managed-node1 podman[34212]: 2025-07-07 20:31:15.029574061 -0400 EDT m=+0.084744570 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:15 managed-node1 auth_test_1_kube-auth_test_1_kube[34224]: This container is intended for podman CI testing\nJul 07 20:31:15 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:15 managed-node1 podman[34228]: 2025-07-07 20:31:15.061604477 -0400 EDT m=+0.019250272 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:15 managed-node1 podman[34228]: 2025-07-07 20:31:15.074116308 -0400 EDT m=+0.031762079 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, 
created_by=test/system/build-testimage)\nJul 07 20:31:15 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5447.\nJul 07 20:31:15 managed-node1 podman[34228]: 2025-07-07 20:31:15.126266773 -0400 EDT m=+0.083912530 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:15 managed-node1 podman[34228]: 2025-07-07 20:31:15.128457455 -0400 EDT m=+0.086103265 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:15 managed-node1 auth_test_1_kube-auth_test_1_kube[34239]: This container is intended for podman CI testing\nJul 07 20:31:15 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:15 managed-node1 podman[34243]: 2025-07-07 20:31:15.160290079 -0400 EDT m=+0.019739730 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:15 managed-node1 podman[34243]: 2025-07-07 20:31:15.173005861 -0400 EDT m=+0.032455438 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:15 managed-node1 systemd[1]: Started 
libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5454.\nJul 07 20:31:15 managed-node1 podman[34243]: 2025-07-07 20:31:15.224078649 -0400 EDT m=+0.083528292 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:15 managed-node1 auth_test_1_kube-auth_test_1_kube[34255]: This container is intended for podman CI testing\nJul 07 20:31:15 managed-node1 podman[34243]: 2025-07-07 20:31:15.226519624 -0400 EDT m=+0.085969316 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:15 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:15 managed-node1 podman[34259]: 2025-07-07 20:31:15.258364625 -0400 EDT m=+0.020012982 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:15 managed-node1 podman[34259]: 2025-07-07 20:31:15.271668865 -0400 EDT m=+0.033317190 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:15 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun 
container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5461.\nJul 07 20:31:15 managed-node1 podman[34259]: 2025-07-07 20:31:15.313951727 -0400 EDT m=+0.075599997 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:15 managed-node1 podman[34259]: 2025-07-07 20:31:15.316172882 -0400 EDT m=+0.077821247 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:15 managed-node1 auth_test_1_kube-auth_test_1_kube[34270]: This container is intended for podman CI testing\nJul 07 20:31:15 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:15 managed-node1 podman[34274]: 2025-07-07 20:31:15.344612573 -0400 EDT m=+0.019589464 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:15 managed-node1 podman[34274]: 2025-07-07 20:31:15.357001121 -0400 EDT m=+0.031977981 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:15 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit 
libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5468.\nJul 07 20:31:15 managed-node1 podman[34274]: 2025-07-07 20:31:15.404767874 -0400 EDT m=+0.079744848 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:15 managed-node1 podman[34274]: 2025-07-07 20:31:15.406968578 -0400 EDT m=+0.081945501 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:15 managed-node1 auth_test_1_kube-auth_test_1_kube[34285]: This container is intended for podman CI testing\nJul 07 20:31:15 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:15 managed-node1 podman[34289]: 2025-07-07 20:31:15.435334159 -0400 EDT m=+0.019393882 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:15 managed-node1 podman[34289]: 2025-07-07 20:31:15.448792134 -0400 EDT m=+0.032851696 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:15 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished 
successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5475.\nJul 07 20:31:15 managed-node1 podman[34289]: 2025-07-07 20:31:15.497735523 -0400 EDT m=+0.081795097 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:15 managed-node1 podman[34289]: 2025-07-07 20:31:15.500052412 -0400 EDT m=+0.084111958 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:15 managed-node1 auth_test_1_kube-auth_test_1_kube[34300]: This container is intended for podman CI testing\nJul 07 20:31:15 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:15 managed-node1 podman[34304]: 2025-07-07 20:31:15.531670941 -0400 EDT m=+0.019281505 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:15 managed-node1 podman[34304]: 2025-07-07 20:31:15.54408611 -0400 EDT m=+0.031696642 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:15 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: 
https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5482.\nJul 07 20:31:15 managed-node1 podman[34304]: 2025-07-07 20:31:15.592327547 -0400 EDT m=+0.079938093 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:15 managed-node1 podman[34304]: 2025-07-07 20:31:15.594549246 -0400 EDT m=+0.082159810 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:15 managed-node1 auth_test_1_kube-auth_test_1_kube[34316]: This container is intended for podman CI testing\nJul 07 20:31:15 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:15 managed-node1 podman[34320]: 2025-07-07 20:31:15.625754289 -0400 EDT m=+0.019330333 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:15 managed-node1 podman[34320]: 2025-07-07 20:31:15.638955363 -0400 EDT m=+0.032531274 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:15 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit 
libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5489.\nJul 07 20:31:15 managed-node1 podman[34320]: 2025-07-07 20:31:15.683628604 -0400 EDT m=+0.077204606 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:15 managed-node1 podman[34320]: 2025-07-07 20:31:15.686024497 -0400 EDT m=+0.079600478 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:15 managed-node1 auth_test_1_kube-auth_test_1_kube[34332]: This container is intended for podman CI testing\nJul 07 20:31:15 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:15 managed-node1 podman[34336]: 2025-07-07 20:31:15.719468906 -0400 EDT m=+0.020475172 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:15 managed-node1 podman[34336]: 2025-07-07 20:31:15.732296519 -0400 EDT m=+0.033302770 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:15 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished 
successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5496.\nJul 07 20:31:15 managed-node1 podman[34336]: 2025-07-07 20:31:15.783618488 -0400 EDT m=+0.084624797 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:15 managed-node1 podman[34336]: 2025-07-07 20:31:15.786287122 -0400 EDT m=+0.087293426 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:15 managed-node1 auth_test_1_kube-auth_test_1_kube[34347]: This container is intended for podman CI testing\nJul 07 20:31:15 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:15 managed-node1 conmon[34347]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events\nJul 07 20:31:15 managed-node1 podman[34351]: 2025-07-07 20:31:15.818032646 -0400 EDT m=+0.019738574 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:15 managed-node1 podman[34351]: 2025-07-07 20:31:15.830825849 -0400 EDT m=+0.032531760 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:15 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished 
successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5503.\nJul 07 20:31:15 managed-node1 podman[34351]: 2025-07-07 20:31:15.873881148 -0400 EDT m=+0.075587030 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:15 managed-node1 podman[34351]: 2025-07-07 20:31:15.876142251 -0400 EDT m=+0.077848220 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:15 managed-node1 auth_test_1_kube-auth_test_1_kube[34362]: This container is intended for podman CI testing\nJul 07 20:31:15 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:15 managed-node1 conmon[34362]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events\nJul 07 20:31:15 managed-node1 podman[34366]: 2025-07-07 20:31:15.905032665 -0400 EDT m=+0.019851791 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:15 managed-node1 podman[34366]: 2025-07-07 20:31:15.917579888 -0400 EDT m=+0.032399005 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:15 managed-node1 systemd[1]: Started 
libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5510.\nJul 07 20:31:15 managed-node1 podman[34366]: 2025-07-07 20:31:15.963523299 -0400 EDT m=+0.078342439 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:15 managed-node1 podman[34366]: 2025-07-07 20:31:15.965793476 -0400 EDT m=+0.080612642 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:15 managed-node1 auth_test_1_kube-auth_test_1_kube[34378]: This container is intended for podman CI testing\nJul 07 20:31:15 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:15 managed-node1 conmon[34378]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events\nJul 07 20:31:15 managed-node1 podman[34382]: 2025-07-07 20:31:15.994098465 -0400 EDT m=+0.018663496 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:16 managed-node1 podman[34382]: 2025-07-07 20:31:16.006855796 -0400 EDT m=+0.031420807 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, 
io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:16 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5517.\nJul 07 20:31:16 managed-node1 podman[34382]: 2025-07-07 20:31:16.051920432 -0400 EDT m=+0.076485439 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:16 managed-node1 podman[34382]: 2025-07-07 20:31:16.054144472 -0400 EDT m=+0.078709526 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:16 managed-node1 auth_test_1_kube-auth_test_1_kube[34393]: This container is intended for podman CI testing\nJul 07 20:31:16 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:16 managed-node1 conmon[34393]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events\nJul 07 20:31:16 managed-node1 podman[34397]: 2025-07-07 20:31:16.087613293 -0400 EDT m=+0.020828534 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:16 managed-node1 podman[34397]: 2025-07-07 20:31:16.10016147 -0400 EDT m=+0.033376682 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e 
(image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:16 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5524.\nJul 07 20:31:16 managed-node1 podman[34397]: 2025-07-07 20:31:16.151131621 -0400 EDT m=+0.084346910 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:16 managed-node1 podman[34397]: 2025-07-07 20:31:16.153850184 -0400 EDT m=+0.087065443 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:16 managed-node1 auth_test_1_kube-auth_test_1_kube[34409]: This container is intended for podman CI testing\nJul 07 20:31:16 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:16 managed-node1 conmon[34409]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events\nJul 07 20:31:16 managed-node1 podman[34413]: 2025-07-07 20:31:16.184001697 -0400 EDT m=+0.019611777 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, 
PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:16 managed-node1 podman[34413]: 2025-07-07 20:31:16.196698451 -0400 EDT m=+0.032308515 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:16 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5531.\nJul 07 20:31:16 managed-node1 podman[34413]: 2025-07-07 20:31:16.242174277 -0400 EDT m=+0.077784338 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:16 managed-node1 podman[34413]: 2025-07-07 20:31:16.244405166 -0400 EDT m=+0.080015426 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:16 managed-node1 auth_test_1_kube-auth_test_1_kube[34424]: This container is intended for podman CI testing\nJul 07 20:31:16 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:16 managed-node1 conmon[34424]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events\nJul 07 20:31:16 managed-node1 podman[34428]: 2025-07-07 20:31:16.274633397 -0400 EDT m=+0.019295849 container died 
88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:16 managed-node1 podman[34428]: 2025-07-07 20:31:16.287296957 -0400 EDT m=+0.031959384 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:16 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5538.\nJul 07 20:31:16 managed-node1 podman[34428]: 2025-07-07 20:31:16.336618852 -0400 EDT m=+0.081281359 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:16 managed-node1 podman[34428]: 2025-07-07 20:31:16.338866161 -0400 EDT m=+0.083528626 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:16 managed-node1 auth_test_1_kube-auth_test_1_kube[34439]: This container is intended for podman CI testing\nJul 07 20:31:16 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:16 managed-node1 podman[34443]: 2025-07-07 20:31:16.371476668 -0400 EDT m=+0.019018051 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e 
(image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:16 managed-node1 podman[34443]: 2025-07-07 20:31:16.382957645 -0400 EDT m=+0.030498984 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:16 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5545.\nJul 07 20:31:16 managed-node1 podman[34443]: 2025-07-07 20:31:16.426945682 -0400 EDT m=+0.074487039 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:16 managed-node1 podman[34443]: 2025-07-07 20:31:16.429176173 -0400 EDT m=+0.076717563 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:16 managed-node1 auth_test_1_kube-auth_test_1_kube[34454]: This container is intended for podman CI testing\nJul 07 20:31:16 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:16 managed-node1 conmon[34454]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: 
/sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events\nJul 07 20:31:16 managed-node1 podman[34458]: 2025-07-07 20:31:16.462935007 -0400 EDT m=+0.021082930 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:16 managed-node1 podman[34458]: 2025-07-07 20:31:16.475166055 -0400 EDT m=+0.033313981 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:16 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5552.\nJul 07 20:31:16 managed-node1 podman[34458]: 2025-07-07 20:31:16.527868549 -0400 EDT m=+0.086016469 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:16 managed-node1 podman[34458]: 2025-07-07 20:31:16.530602277 -0400 EDT m=+0.088750254 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:16 managed-node1 auth_test_1_kube-auth_test_1_kube[34469]: This container is intended for podman CI testing\nJul 07 20:31:16 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit 
libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:16 managed-node1 podman[34473]: 2025-07-07 20:31:16.563116714 -0400 EDT m=+0.019184411 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:16 managed-node1 podman[34473]: 2025-07-07 20:31:16.575253939 -0400 EDT m=+0.031321623 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:16 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5559.\nJul 07 20:31:16 managed-node1 podman[34473]: 2025-07-07 20:31:16.630976001 -0400 EDT m=+0.087043705 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:16 managed-node1 podman[34473]: 2025-07-07 20:31:16.633675896 -0400 EDT m=+0.089743616 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:16 managed-node1 auth_test_1_kube-auth_test_1_kube[34485]: This container is intended for podman CI testing\nJul 07 20:31:16 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully 
entered the 'dead' state.\nJul 07 20:31:16 managed-node1 podman[34489]: 2025-07-07 20:31:16.666341559 -0400 EDT m=+0.019125869 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:16 managed-node1 podman[34489]: 2025-07-07 20:31:16.678990158 -0400 EDT m=+0.031774458 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:16 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5566.\nJul 07 20:31:16 managed-node1 podman[34489]: 2025-07-07 20:31:16.72243168 -0400 EDT m=+0.075216031 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:16 managed-node1 podman[34489]: 2025-07-07 20:31:16.725180691 -0400 EDT m=+0.077965012 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:16 managed-node1 auth_test_1_kube-auth_test_1_kube[34500]: This container is intended for podman CI testing\nJul 07 20:31:16 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:16 managed-node1 podman[34504]: 2025-07-07 20:31:16.754814029 
-0400 EDT m=+0.021391737 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:16 managed-node1 podman[34504]: 2025-07-07 20:31:16.767687927 -0400 EDT m=+0.034265655 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:16 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5573.\nJul 07 20:31:16 managed-node1 podman[34504]: 2025-07-07 20:31:16.813137127 -0400 EDT m=+0.079714972 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:16 managed-node1 podman[34504]: 2025-07-07 20:31:16.815411872 -0400 EDT m=+0.081989653 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:16 managed-node1 auth_test_1_kube-auth_test_1_kube[34515]: This container is intended for podman CI testing\nJul 07 20:31:16 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:16 managed-node1 conmon[34515]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: 
/sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events\nJul 07 20:31:16 managed-node1 podman[34519]: 2025-07-07 20:31:16.849132759 -0400 EDT m=+0.018817320 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:16 managed-node1 podman[34519]: 2025-07-07 20:31:16.862312923 -0400 EDT m=+0.031997574 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:16 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5580.\nJul 07 20:31:16 managed-node1 podman[34519]: 2025-07-07 20:31:16.906674999 -0400 EDT m=+0.076359616 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:16 managed-node1 podman[34519]: 2025-07-07 20:31:16.909880486 -0400 EDT m=+0.079565172 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:16 managed-node1 auth_test_1_kube-auth_test_1_kube[34530]: This container is intended for podman CI testing\nJul 07 20:31:16 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit 
libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:16 managed-node1 podman[34534]: 2025-07-07 20:31:16.95844908 -0400 EDT m=+0.033673257 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:16 managed-node1 podman[34534]: 2025-07-07 20:31:16.971595578 -0400 EDT m=+0.046819804 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:17 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5587.\nJul 07 20:31:17 managed-node1 podman[34534]: 2025-07-07 20:31:17.025056202 -0400 EDT m=+0.100280598 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:17 managed-node1 podman[34534]: 2025-07-07 20:31:17.029457616 -0400 EDT m=+0.104681754 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:17 managed-node1 auth_test_1_kube-auth_test_1_kube[34599]: This container is intended for podman CI testing\nJul 07 20:31:17 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully 
entered the 'dead' state.\nJul 07 20:31:17 managed-node1 podman[34627]: 2025-07-07 20:31:17.070393858 -0400 EDT m=+0.025970516 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:17 managed-node1 podman[34627]: 2025-07-07 20:31:17.084512102 -0400 EDT m=+0.040088565 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:17 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5594.\nJul 07 20:31:17 managed-node1 podman[34627]: 2025-07-07 20:31:17.177102708 -0400 EDT m=+0.132679404 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:17 managed-node1 auth_test_1_kube-auth_test_1_kube[34683]: This container is intended for podman CI testing\nJul 07 20:31:17 managed-node1 podman[34627]: 2025-07-07 20:31:17.180894161 -0400 EDT m=+0.136470572 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:17 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:17 managed-node1 podman[34695]: 2025-07-07 
20:31:17.231847651 -0400 EDT m=+0.034297314 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:17 managed-node1 podman[34695]: 2025-07-07 20:31:17.246740629 -0400 EDT m=+0.049190236 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:17 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5601.\nJul 07 20:31:17 managed-node1 python3.12[34693]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:31:17 managed-node1 podman[34695]: 2025-07-07 20:31:17.30599944 -0400 EDT m=+0.108449088 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:17 managed-node1 podman[34695]: 2025-07-07 20:31:17.308938661 -0400 EDT m=+0.111388407 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:17 managed-node1 auth_test_1_kube-auth_test_1_kube[34706]: This container is intended for podman CI testing\nJul 07 20:31:17 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has 
successfully entered the 'dead' state.\nJul 07 20:31:17 managed-node1 conmon[34706]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events\nJul 07 20:31:17 managed-node1 podman[34713]: 2025-07-07 20:31:17.350459916 -0400 EDT m=+0.024235490 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:17 managed-node1 podman[34713]: 2025-07-07 20:31:17.365971326 -0400 EDT m=+0.039746708 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:17 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5608.\nJul 07 20:31:17 managed-node1 podman[34713]: 2025-07-07 20:31:17.415348107 -0400 EDT m=+0.089123488 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:17 managed-node1 podman[34713]: 2025-07-07 20:31:17.417592199 -0400 EDT m=+0.091367661 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:17 managed-node1 auth_test_1_kube-auth_test_1_kube[34750]: This container is intended for podman CI testing\nJul 07 20:31:17 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: 
Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:17 managed-node1 podman[34754]: 2025-07-07 20:31:17.448087244 -0400 EDT m=+0.021608245 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:17 managed-node1 podman[34754]: 2025-07-07 20:31:17.460699437 -0400 EDT m=+0.034220411 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:17 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5615.\nJul 07 20:31:17 managed-node1 podman[34754]: 2025-07-07 20:31:17.506809745 -0400 EDT m=+0.080330782 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:17 managed-node1 podman[34754]: 2025-07-07 20:31:17.509127066 -0400 EDT m=+0.082648093 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:17 managed-node1 auth_test_1_kube-auth_test_1_kube[34765]: This container is intended for podman CI testing\nJul 07 20:31:17 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: 
https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:17 managed-node1 podman[34769]: 2025-07-07 20:31:17.542481404 -0400 EDT m=+0.021358978 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:17 managed-node1 podman[34769]: 2025-07-07 20:31:17.5570775 -0400 EDT m=+0.035954986 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:17 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5622.\nJul 07 20:31:17 managed-node1 podman[34769]: 2025-07-07 20:31:17.60711597 -0400 EDT m=+0.085993501 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:17 managed-node1 podman[34769]: 2025-07-07 20:31:17.609755396 -0400 EDT m=+0.088632916 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:17 managed-node1 auth_test_1_kube-auth_test_1_kube[34780]: This container is intended for podman CI testing\nJul 07 20:31:17 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit 
libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:17 managed-node1 podman[34784]: 2025-07-07 20:31:17.640012378 -0400 EDT m=+0.021140746 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:17 managed-node1 podman[34784]: 2025-07-07 20:31:17.652490475 -0400 EDT m=+0.033618838 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:17 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5629.\nJul 07 20:31:17 managed-node1 podman[34784]: 2025-07-07 20:31:17.70011701 -0400 EDT m=+0.081245479 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:17 managed-node1 auth_test_1_kube-auth_test_1_kube[34795]: This container is intended for podman CI testing\nJul 07 20:31:17 managed-node1 podman[34784]: 2025-07-07 20:31:17.703762734 -0400 EDT m=+0.084891179 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:17 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully 
entered the 'dead' state.\nJul 07 20:31:17 managed-node1 podman[34799]: 2025-07-07 20:31:17.736074614 -0400 EDT m=+0.020056195 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:17 managed-node1 podman[34799]: 2025-07-07 20:31:17.748527888 -0400 EDT m=+0.032509382 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:17 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5636.\nJul 07 20:31:17 managed-node1 podman[34799]: 2025-07-07 20:31:17.79543748 -0400 EDT m=+0.079419067 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:17 managed-node1 podman[34799]: 2025-07-07 20:31:17.798751634 -0400 EDT m=+0.082733139 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:17 managed-node1 auth_test_1_kube-auth_test_1_kube[34810]: This container is intended for podman CI testing\nJul 07 20:31:17 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:17 managed-node1 conmon[34810]: conmon 88fb929d50ddcb79a51c : 
Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events\nJul 07 20:31:17 managed-node1 podman[34814]: 2025-07-07 20:31:17.831629085 -0400 EDT m=+0.020108874 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:17 managed-node1 podman[34814]: 2025-07-07 20:31:17.844326128 -0400 EDT m=+0.032805907 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:17 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5643.\nJul 07 20:31:17 managed-node1 podman[34814]: 2025-07-07 20:31:17.898323544 -0400 EDT m=+0.086803336 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:17 managed-node1 podman[34814]: 2025-07-07 20:31:17.900673692 -0400 EDT m=+0.089153423 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:17 managed-node1 auth_test_1_kube-auth_test_1_kube[34825]: This container is intended for podman CI testing\nJul 07 20:31:17 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: 
https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:17 managed-node1 podman[34829]: 2025-07-07 20:31:17.931521246 -0400 EDT m=+0.021848806 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:17 managed-node1 podman[34829]: 2025-07-07 20:31:17.94406602 -0400 EDT m=+0.034393554 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:17 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5650.\nJul 07 20:31:17 managed-node1 podman[34829]: 2025-07-07 20:31:17.996305996 -0400 EDT m=+0.086633529 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:17 managed-node1 podman[34829]: 2025-07-07 20:31:17.998742848 -0400 EDT m=+0.089070425 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:17 managed-node1 auth_test_1_kube-auth_test_1_kube[34841]: This container is intended for podman CI testing\nJul 07 20:31:18 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit 
libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:18 managed-node1 podman[34845]: 2025-07-07 20:31:18.029185879 -0400 EDT m=+0.019114003 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:18 managed-node1 podman[34845]: 2025-07-07 20:31:18.042521069 -0400 EDT m=+0.032449143 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:18 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5657.\nJul 07 20:31:18 managed-node1 podman[34845]: 2025-07-07 20:31:18.092988624 -0400 EDT m=+0.082916705 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:18 managed-node1 podman[34845]: 2025-07-07 20:31:18.095313174 -0400 EDT m=+0.085241278 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:18 managed-node1 auth_test_1_kube-auth_test_1_kube[34857]: This container is intended for podman CI testing\nJul 07 20:31:18 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully 
entered the 'dead' state.\nJul 07 20:31:18 managed-node1 podman[34861]: 2025-07-07 20:31:18.129668866 -0400 EDT m=+0.021224565 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:18 managed-node1 podman[34861]: 2025-07-07 20:31:18.142326085 -0400 EDT m=+0.033881760 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:18 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5664.\nJul 07 20:31:18 managed-node1 podman[34861]: 2025-07-07 20:31:18.19279771 -0400 EDT m=+0.084353472 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:18 managed-node1 podman[34861]: 2025-07-07 20:31:18.195648063 -0400 EDT m=+0.087203915 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:18 managed-node1 auth_test_1_kube-auth_test_1_kube[34872]: This container is intended for podman CI testing\nJul 07 20:31:18 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:18 managed-node1 podman[34876]: 2025-07-07 20:31:18.229439645 
-0400 EDT m=+0.020253492 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:18 managed-node1 podman[34876]: 2025-07-07 20:31:18.24218817 -0400 EDT m=+0.033002023 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:18 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5671.\nJul 07 20:31:18 managed-node1 podman[34876]: 2025-07-07 20:31:18.29392756 -0400 EDT m=+0.084741430 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:18 managed-node1 podman[34876]: 2025-07-07 20:31:18.296287803 -0400 EDT m=+0.087101686 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:18 managed-node1 auth_test_1_kube-auth_test_1_kube[34888]: This container is intended for podman CI testing\nJul 07 20:31:18 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:18 managed-node1 podman[34892]: 2025-07-07 20:31:18.32955635 -0400 EDT m=+0.020182773 container died 
88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:18 managed-node1 podman[34892]: 2025-07-07 20:31:18.342477137 -0400 EDT m=+0.033103451 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:18 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5678.\nJul 07 20:31:18 managed-node1 podman[34892]: 2025-07-07 20:31:18.394419816 -0400 EDT m=+0.085046197 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:18 managed-node1 podman[34892]: 2025-07-07 20:31:18.397243808 -0400 EDT m=+0.087870271 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:18 managed-node1 auth_test_1_kube-auth_test_1_kube[34903]: This container is intended for podman CI testing\nJul 07 20:31:18 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:18 managed-node1 podman[34907]: 2025-07-07 20:31:18.429250615 -0400 EDT m=+0.019856460 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e 
(image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:18 managed-node1 podman[34907]: 2025-07-07 20:31:18.441783606 -0400 EDT m=+0.032389406 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:18 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5685.\nJul 07 20:31:18 managed-node1 podman[34907]: 2025-07-07 20:31:18.499091104 -0400 EDT m=+0.089697198 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:18 managed-node1 podman[34907]: 2025-07-07 20:31:18.503340104 -0400 EDT m=+0.093946062 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:18 managed-node1 auth_test_1_kube-auth_test_1_kube[34943]: This container is intended for podman CI testing\nJul 07 20:31:18 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:18 managed-node1 podman[34964]: 2025-07-07 20:31:18.543763048 -0400 EDT m=+0.023752219 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, 
PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:18 managed-node1 podman[34964]: 2025-07-07 20:31:18.560167521 -0400 EDT m=+0.040156547 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:18 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5692.\nJul 07 20:31:18 managed-node1 podman[34964]: 2025-07-07 20:31:18.612046282 -0400 EDT m=+0.092035672 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:18 managed-node1 auth_test_1_kube-auth_test_1_kube[35012]: This container is intended for podman CI testing\nJul 07 20:31:18 managed-node1 podman[34964]: 2025-07-07 20:31:18.615806989 -0400 EDT m=+0.095796050 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:18 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:18 managed-node1 conmon[35012]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events\nJul 07 20:31:18 managed-node1 
podman[35041]: 2025-07-07 20:31:18.668478401 -0400 EDT m=+0.035538417 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:18 managed-node1 podman[35041]: 2025-07-07 20:31:18.683517085 -0400 EDT m=+0.050577052 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:18 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5699.\nJul 07 20:31:18 managed-node1 podman[35041]: 2025-07-07 20:31:18.763957069 -0400 EDT m=+0.131017381 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:18 managed-node1 auth_test_1_kube-auth_test_1_kube[35079]: This container is intended for podman CI testing\nJul 07 20:31:18 managed-node1 podman[35041]: 2025-07-07 20:31:18.767955059 -0400 EDT m=+0.135015017 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:18 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:18 managed-node1 podman[35085]: 2025-07-07 20:31:18.820556221 -0400 EDT m=+0.036984989 container died 
88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:18 managed-node1 podman[35085]: 2025-07-07 20:31:18.836884195 -0400 EDT m=+0.053313095 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:18 managed-node1 python3.12[35074]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:31:18 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5706.\nJul 07 20:31:18 managed-node1 podman[35085]: 2025-07-07 20:31:18.897084147 -0400 EDT m=+0.113513334 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:18 managed-node1 auth_test_1_kube-auth_test_1_kube[35098]: This container is intended for podman CI testing\nJul 07 20:31:18 managed-node1 podman[35085]: 2025-07-07 20:31:18.901223207 -0400 EDT m=+0.117651974 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:18 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:18 
managed-node1 conmon[35098]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events\nJul 07 20:31:18 managed-node1 podman[35123]: 2025-07-07 20:31:18.935007612 -0400 EDT m=+0.019974481 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:18 managed-node1 podman[35123]: 2025-07-07 20:31:18.947889427 -0400 EDT m=+0.032856264 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:18 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5713.\nJul 07 20:31:18 managed-node1 podman[35123]: 2025-07-07 20:31:18.991577845 -0400 EDT m=+0.076544712 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:18 managed-node1 podman[35123]: 2025-07-07 20:31:18.993849057 -0400 EDT m=+0.078815939 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:18 managed-node1 auth_test_1_kube-auth_test_1_kube[35138]: This container is intended for podman CI testing\nJul 07 20:31:18 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: 
systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:18 managed-node1 conmon[35138]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events\nJul 07 20:31:19 managed-node1 podman[35143]: 2025-07-07 20:31:19.023419114 -0400 EDT m=+0.019988935 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:19 managed-node1 podman[35143]: 2025-07-07 20:31:19.036118694 -0400 EDT m=+0.032688485 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:19 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5720.\nJul 07 20:31:19 managed-node1 podman[35143]: 2025-07-07 20:31:19.087366897 -0400 EDT m=+0.083936778 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:19 managed-node1 podman[35143]: 2025-07-07 20:31:19.089680483 -0400 EDT m=+0.086250306 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:19 managed-node1 auth_test_1_kube-auth_test_1_kube[35154]: This container is intended for 
podman CI testing\nJul 07 20:31:19 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:19 managed-node1 podman[35158]: 2025-07-07 20:31:19.122637177 -0400 EDT m=+0.020242589 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:19 managed-node1 podman[35158]: 2025-07-07 20:31:19.135301853 -0400 EDT m=+0.032907248 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:19 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5727.\nJul 07 20:31:19 managed-node1 podman[35158]: 2025-07-07 20:31:19.186419369 -0400 EDT m=+0.084024763 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:19 managed-node1 podman[35158]: 2025-07-07 20:31:19.188698909 -0400 EDT m=+0.086304571 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:19 managed-node1 auth_test_1_kube-auth_test_1_kube[35170]: This container is intended for podman CI testing\nJul 07 20:31:19 managed-node1 systemd[1]: 
libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:19 managed-node1 podman[35174]: 2025-07-07 20:31:19.217758606 -0400 EDT m=+0.020317035 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:19 managed-node1 podman[35174]: 2025-07-07 20:31:19.230886804 -0400 EDT m=+0.033445173 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:19 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5734.\nJul 07 20:31:19 managed-node1 podman[35174]: 2025-07-07 20:31:19.275277786 -0400 EDT m=+0.077836183 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:19 managed-node1 podman[35174]: 2025-07-07 20:31:19.277533735 -0400 EDT m=+0.080092190 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:19 managed-node1 auth_test_1_kube-auth_test_1_kube[35185]: This container is intended for podman CI testing\nJul 07 20:31:19 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated 
successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:19 managed-node1 conmon[35185]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events\nJul 07 20:31:19 managed-node1 podman[35190]: 2025-07-07 20:31:19.309991866 -0400 EDT m=+0.019488519 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:19 managed-node1 podman[35190]: 2025-07-07 20:31:19.322658146 -0400 EDT m=+0.032154753 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:19 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5741.\nJul 07 20:31:19 managed-node1 podman[35190]: 2025-07-07 20:31:19.370610614 -0400 EDT m=+0.080107346 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:19 managed-node1 podman[35190]: 2025-07-07 20:31:19.372876842 -0400 EDT m=+0.082373476 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:19 managed-node1 
auth_test_1_kube-auth_test_1_kube[35201]: This container is intended for podman CI testing\nJul 07 20:31:19 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:19 managed-node1 podman[35205]: 2025-07-07 20:31:19.404869858 -0400 EDT m=+0.019126064 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:19 managed-node1 podman[35205]: 2025-07-07 20:31:19.417976636 -0400 EDT m=+0.032232823 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:19 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5748.\nJul 07 20:31:19 managed-node1 podman[35205]: 2025-07-07 20:31:19.470131778 -0400 EDT m=+0.084388043 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:19 managed-node1 podman[35205]: 2025-07-07 20:31:19.472376286 -0400 EDT m=+0.086632540 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:19 managed-node1 auth_test_1_kube-auth_test_1_kube[35216]: This container is intended for podman CI testing\nJul 07 
20:31:19 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:19 managed-node1 podman[35220]: 2025-07-07 20:31:19.503576586 -0400 EDT m=+0.019196560 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:19 managed-node1 podman[35220]: 2025-07-07 20:31:19.516745509 -0400 EDT m=+0.032365490 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:19 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5755.\nJul 07 20:31:19 managed-node1 podman[35220]: 2025-07-07 20:31:19.564932147 -0400 EDT m=+0.080552114 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:19 managed-node1 podman[35220]: 2025-07-07 20:31:19.567622797 -0400 EDT m=+0.083242787 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:19 managed-node1 auth_test_1_kube-auth_test_1_kube[35232]: This container is intended for podman CI testing\nJul 07 20:31:19 managed-node1 systemd[1]: 
libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:19 managed-node1 podman[35236]: 2025-07-07 20:31:19.59907771 -0400 EDT m=+0.019603882 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:19 managed-node1 podman[35236]: 2025-07-07 20:31:19.611520721 -0400 EDT m=+0.032046839 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:19 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5762.\nJul 07 20:31:19 managed-node1 podman[35236]: 2025-07-07 20:31:19.65544326 -0400 EDT m=+0.075969379 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:19 managed-node1 podman[35236]: 2025-07-07 20:31:19.658256486 -0400 EDT m=+0.078782651 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:19 managed-node1 auth_test_1_kube-auth_test_1_kube[35248]: This container is intended for podman CI testing\nJul 07 20:31:19 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated 
successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:19 managed-node1 podman[35252]: 2025-07-07 20:31:19.6872466 -0400 EDT m=+0.019976252 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:19 managed-node1 podman[35252]: 2025-07-07 20:31:19.699843173 -0400 EDT m=+0.032572770 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:19 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5769.\nJul 07 20:31:19 managed-node1 podman[35252]: 2025-07-07 20:31:19.748941031 -0400 EDT m=+0.081670642 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:19 managed-node1 podman[35252]: 2025-07-07 20:31:19.751143098 -0400 EDT m=+0.083872738 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:19 managed-node1 auth_test_1_kube-auth_test_1_kube[35263]: This container is intended for podman CI testing\nJul 07 20:31:19 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 
Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:19 managed-node1 podman[35267]: 2025-07-07 20:31:19.780255667 -0400 EDT m=+0.019815074 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:19 managed-node1 podman[35267]: 2025-07-07 20:31:19.793478385 -0400 EDT m=+0.033037771 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:19 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5776.\nJul 07 20:31:19 managed-node1 podman[35267]: 2025-07-07 20:31:19.850378106 -0400 EDT m=+0.089937499 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:19 managed-node1 podman[35267]: 2025-07-07 20:31:19.852611996 -0400 EDT m=+0.092171444 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:19 managed-node1 auth_test_1_kube-auth_test_1_kube[35279]: This container is intended for podman CI testing\nJul 07 20:31:19 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit 
libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:19 managed-node1 podman[35283]: 2025-07-07 20:31:19.881451267 -0400 EDT m=+0.019768672 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:19 managed-node1 podman[35283]: 2025-07-07 20:31:19.894469235 -0400 EDT m=+0.032786593 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:19 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5783.\nJul 07 20:31:19 managed-node1 podman[35283]: 2025-07-07 20:31:19.947127502 -0400 EDT m=+0.085444912 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:19 managed-node1 auth_test_1_kube-auth_test_1_kube[35295]: This container is intended for podman CI testing\nJul 07 20:31:19 managed-node1 podman[35283]: 2025-07-07 20:31:19.949981816 -0400 EDT m=+0.088299203 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:19 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully 
entered the 'dead' state.\nJul 07 20:31:19 managed-node1 podman[35299]: 2025-07-07 20:31:19.977641859 -0400 EDT m=+0.019122198 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:19 managed-node1 podman[35299]: 2025-07-07 20:31:19.989990665 -0400 EDT m=+0.031470962 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:20 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5790.\nJul 07 20:31:20 managed-node1 podman[35299]: 2025-07-07 20:31:20.03595698 -0400 EDT m=+0.077437358 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:20 managed-node1 auth_test_1_kube-auth_test_1_kube[35310]: This container is intended for podman CI testing\nJul 07 20:31:20 managed-node1 podman[35299]: 2025-07-07 20:31:20.038750281 -0400 EDT m=+0.080230711 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:20 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:20 managed-node1 podman[35314]: 2025-07-07 20:31:20.069430734 
-0400 EDT m=+0.019157221 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:20 managed-node1 podman[35314]: 2025-07-07 20:31:20.082223442 -0400 EDT m=+0.031949882 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:20 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5797.\nJul 07 20:31:20 managed-node1 podman[35314]: 2025-07-07 20:31:20.125758126 -0400 EDT m=+0.075484648 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:20 managed-node1 podman[35314]: 2025-07-07 20:31:20.127991237 -0400 EDT m=+0.077717738 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:20 managed-node1 auth_test_1_kube-auth_test_1_kube[35325]: This container is intended for podman CI testing\nJul 07 20:31:20 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:20 managed-node1 podman[35329]: 2025-07-07 20:31:20.173287508 -0400 EDT m=+0.031612849 container died 
88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:20 managed-node1 podman[35329]: 2025-07-07 20:31:20.186746552 -0400 EDT m=+0.045071956 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:20 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5804.\nJul 07 20:31:20 managed-node1 podman[35329]: 2025-07-07 20:31:20.23913445 -0400 EDT m=+0.097459723 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:20 managed-node1 podman[35329]: 2025-07-07 20:31:20.244356296 -0400 EDT m=+0.102681531 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:20 managed-node1 auth_test_1_kube-auth_test_1_kube[35396]: This container is intended for podman CI testing\nJul 07 20:31:20 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:20 managed-node1 podman[35424]: 2025-07-07 20:31:20.287762482 -0400 EDT m=+0.028376300 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e 
(image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:20 managed-node1 podman[35424]: 2025-07-07 20:31:20.30096729 -0400 EDT m=+0.041580947 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:20 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5811.\nJul 07 20:31:20 managed-node1 podman[35424]: 2025-07-07 20:31:20.391073306 -0400 EDT m=+0.131687162 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:20 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:20 managed-node1 conmon[35487]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events\nJul 07 20:31:20 managed-node1 auth_test_1_kube-auth_test_1_kube[35487]: This container is intended for podman CI testing\nJul 07 20:31:20 managed-node1 podman[35424]: 2025-07-07 20:31:20.397762343 -0400 EDT m=+0.138376159 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, 
created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:20 managed-node1 podman[35492]: 2025-07-07 20:31:20.447438415 -0400 EDT m=+0.034439229 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:20 managed-node1 podman[35492]: 2025-07-07 20:31:20.461905103 -0400 EDT m=+0.048905869 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:20 managed-node1 python3.12[35489]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:31:20 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5818.\nJul 07 20:31:20 managed-node1 podman[35492]: 2025-07-07 20:31:20.533894439 -0400 EDT m=+0.120895390 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:20 managed-node1 podman[35492]: 2025-07-07 20:31:20.537755393 -0400 EDT m=+0.124756070 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:20 managed-node1 auth_test_1_kube-auth_test_1_kube[35503]: This container is intended for podman CI testing\nJul 07 20:31:20 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: 
https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:20 managed-node1 conmon[35503]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events\nJul 07 20:31:20 managed-node1 podman[35510]: 2025-07-07 20:31:20.584899484 -0400 EDT m=+0.036017052 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:20 managed-node1 podman[35510]: 2025-07-07 20:31:20.599680844 -0400 EDT m=+0.050798057 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:20 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5825.\nJul 07 20:31:20 managed-node1 podman[35510]: 2025-07-07 20:31:20.65090971 -0400 EDT m=+0.102027154 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:20 managed-node1 auth_test_1_kube-auth_test_1_kube[35546]: This container is intended for podman CI testing\nJul 07 20:31:20 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:20 managed-node1 podman[35510]: 2025-07-07 20:31:20.655609593 -0400 EDT m=+0.106726924 container start 
88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:20 managed-node1 podman[35550]: 2025-07-07 20:31:20.686911258 -0400 EDT m=+0.019577458 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:20 managed-node1 podman[35550]: 2025-07-07 20:31:20.6980022 -0400 EDT m=+0.030668224 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:20 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5832.\nJul 07 20:31:20 managed-node1 podman[35550]: 2025-07-07 20:31:20.740675424 -0400 EDT m=+0.073341499 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:20 managed-node1 podman[35550]: 2025-07-07 20:31:20.743551454 -0400 EDT m=+0.076217500 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:20 managed-node1 auth_test_1_kube-auth_test_1_kube[35562]: This container is intended for podman CI testing\nJul 07 20:31:20 managed-node1 systemd[1]: 
libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:20 managed-node1 podman[35566]: 2025-07-07 20:31:20.777841216 -0400 EDT m=+0.019250596 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:20 managed-node1 podman[35566]: 2025-07-07 20:31:20.790073477 -0400 EDT m=+0.031482829 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:20 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5839.\nJul 07 20:31:20 managed-node1 podman[35566]: 2025-07-07 20:31:20.835978316 -0400 EDT m=+0.077387778 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:20 managed-node1 podman[35566]: 2025-07-07 20:31:20.838343884 -0400 EDT m=+0.079753324 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:20 managed-node1 auth_test_1_kube-auth_test_1_kube[35577]: This container is intended for podman CI testing\nJul 07 20:31:20 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated 
successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:20 managed-node1 podman[35581]: 2025-07-07 20:31:20.871022031 -0400 EDT m=+0.020388383 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:20 managed-node1 podman[35581]: 2025-07-07 20:31:20.883687135 -0400 EDT m=+0.033053449 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:20 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5846.\nJul 07 20:31:20 managed-node1 podman[35581]: 2025-07-07 20:31:20.927067348 -0400 EDT m=+0.076433717 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:20 managed-node1 podman[35581]: 2025-07-07 20:31:20.929368311 -0400 EDT m=+0.078734664 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:20 managed-node1 auth_test_1_kube-auth_test_1_kube[35592]: This container is intended for podman CI testing\nJul 07 20:31:20 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 
Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:20 managed-node1 podman[35596]: 2025-07-07 20:31:20.959533941 -0400 EDT m=+0.021159359 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:20 managed-node1 podman[35596]: 2025-07-07 20:31:20.972214012 -0400 EDT m=+0.033839418 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:21 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5853.\nJul 07 20:31:21 managed-node1 podman[35596]: 2025-07-07 20:31:21.030229346 -0400 EDT m=+0.091854758 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:21 managed-node1 podman[35596]: 2025-07-07 20:31:21.032531933 -0400 EDT m=+0.094157437 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:21 managed-node1 auth_test_1_kube-auth_test_1_kube[35607]: This container is intended for podman CI testing\nJul 07 20:31:21 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit 
libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:21 managed-node1 podman[35611]: 2025-07-07 20:31:21.063369026 -0400 EDT m=+0.019165461 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:21 managed-node1 podman[35611]: 2025-07-07 20:31:21.075787463 -0400 EDT m=+0.031583907 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:21 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5860.\nJul 07 20:31:21 managed-node1 podman[35611]: 2025-07-07 20:31:21.119252494 -0400 EDT m=+0.075048953 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:21 managed-node1 podman[35611]: 2025-07-07 20:31:21.121516588 -0400 EDT m=+0.077313068 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:21 managed-node1 auth_test_1_kube-auth_test_1_kube[35624]: This container is intended for podman CI testing\nJul 07 20:31:21 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully 
entered the 'dead' state.\nJul 07 20:31:21 managed-node1 podman[35628]: 2025-07-07 20:31:21.155387475 -0400 EDT m=+0.020243992 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:21 managed-node1 podman[35628]: 2025-07-07 20:31:21.167967405 -0400 EDT m=+0.032823945 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:21 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5867.\nJul 07 20:31:21 managed-node1 podman[35628]: 2025-07-07 20:31:21.21209413 -0400 EDT m=+0.076950725 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:21 managed-node1 podman[35628]: 2025-07-07 20:31:21.21496234 -0400 EDT m=+0.079818903 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:21 managed-node1 auth_test_1_kube-auth_test_1_kube[35639]: This container is intended for podman CI testing\nJul 07 20:31:21 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:21 managed-node1 conmon[35639]: conmon 88fb929d50ddcb79a51c : 
Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events\nJul 07 20:31:21 managed-node1 podman[35643]: 2025-07-07 20:31:21.247600124 -0400 EDT m=+0.019629048 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:21 managed-node1 podman[35643]: 2025-07-07 20:31:21.260246588 -0400 EDT m=+0.032275488 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:21 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5874.\nJul 07 20:31:21 managed-node1 podman[35643]: 2025-07-07 20:31:21.303265254 -0400 EDT m=+0.075294202 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:21 managed-node1 podman[35643]: 2025-07-07 20:31:21.305512693 -0400 EDT m=+0.077541623 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:21 managed-node1 auth_test_1_kube-auth_test_1_kube[35655]: This container is intended for podman CI testing\nJul 07 20:31:21 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: 
https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:21 managed-node1 podman[35659]: 2025-07-07 20:31:21.338665243 -0400 EDT m=+0.020317934 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:21 managed-node1 podman[35659]: 2025-07-07 20:31:21.351272249 -0400 EDT m=+0.032924968 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:21 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5881.\nJul 07 20:31:21 managed-node1 podman[35659]: 2025-07-07 20:31:21.401104004 -0400 EDT m=+0.082757238 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:21 managed-node1 podman[35659]: 2025-07-07 20:31:21.40542619 -0400 EDT m=+0.087078930 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:21 managed-node1 auth_test_1_kube-auth_test_1_kube[35671]: This container is intended for podman CI testing\nJul 07 20:31:21 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit 
libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:21 managed-node1 podman[35699]: 2025-07-07 20:31:21.453660664 -0400 EDT m=+0.035257593 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:21 managed-node1 podman[35699]: 2025-07-07 20:31:21.467879499 -0400 EDT m=+0.049476440 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:21 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5888.\nJul 07 20:31:21 managed-node1 podman[35699]: 2025-07-07 20:31:21.524192051 -0400 EDT m=+0.105789182 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:21 managed-node1 podman[35699]: 2025-07-07 20:31:21.52952801 -0400 EDT m=+0.111125008 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:21 managed-node1 auth_test_1_kube-auth_test_1_kube[35764]: This container is intended for podman CI testing\nJul 07 20:31:21 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully 
entered the 'dead' state.\nJul 07 20:31:21 managed-node1 podman[35788]: 2025-07-07 20:31:21.568220445 -0400 EDT m=+0.024930216 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:21 managed-node1 podman[35788]: 2025-07-07 20:31:21.582776916 -0400 EDT m=+0.039486887 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:21 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5895.\nJul 07 20:31:21 managed-node1 podman[35788]: 2025-07-07 20:31:21.659098135 -0400 EDT m=+0.115808088 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:21 managed-node1 auth_test_1_kube-auth_test_1_kube[35831]: This container is intended for podman CI testing\nJul 07 20:31:21 managed-node1 podman[35788]: 2025-07-07 20:31:21.662979243 -0400 EDT m=+0.119689160 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:21 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:21 managed-node1 podman[35835]: 2025-07-07 
20:31:21.712431425 -0400 EDT m=+0.032457509 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:21 managed-node1 podman[35835]: 2025-07-07 20:31:21.727152622 -0400 EDT m=+0.047178689 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:21 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5902.\nJul 07 20:31:21 managed-node1 python3.12[35829]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:31:21 managed-node1 podman[35835]: 2025-07-07 20:31:21.787507833 -0400 EDT m=+0.107533916 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:21 managed-node1 auth_test_1_kube-auth_test_1_kube[35846]: This container is intended for podman CI testing\nJul 07 20:31:21 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:21 managed-node1 podman[35835]: 2025-07-07 20:31:21.793169961 -0400 EDT m=+0.113195967 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, 
created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:21 managed-node1 podman[35851]: 2025-07-07 20:31:21.827312399 -0400 EDT m=+0.024335220 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:21 managed-node1 podman[35851]: 2025-07-07 20:31:21.84144561 -0400 EDT m=+0.038468441 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:21 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5909.\nJul 07 20:31:21 managed-node1 podman[35851]: 2025-07-07 20:31:21.89141785 -0400 EDT m=+0.088440682 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:21 managed-node1 podman[35851]: 2025-07-07 20:31:21.89363808 -0400 EDT m=+0.090660834 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:21 managed-node1 auth_test_1_kube-auth_test_1_kube[35887]: This container is intended for podman CI testing\nJul 07 20:31:21 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 
\n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:21 managed-node1 podman[35891]: 2025-07-07 20:31:21.923398352 -0400 EDT m=+0.021182183 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:21 managed-node1 podman[35891]: 2025-07-07 20:31:21.936001816 -0400 EDT m=+0.033785637 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:21 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5916.\nJul 07 20:31:21 managed-node1 podman[35891]: 2025-07-07 20:31:21.987441604 -0400 EDT m=+0.085225415 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:21 managed-node1 podman[35891]: 2025-07-07 20:31:21.990929649 -0400 EDT m=+0.088713552 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:21 managed-node1 auth_test_1_kube-auth_test_1_kube[35902]: This container is intended for podman CI testing\nJul 07 20:31:21 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit 
libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:22 managed-node1 podman[35906]: 2025-07-07 20:31:22.0218562 -0400 EDT m=+0.017984372 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:22 managed-node1 podman[35906]: 2025-07-07 20:31:22.034844415 -0400 EDT m=+0.030972531 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:22 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5923.\nJul 07 20:31:22 managed-node1 podman[35906]: 2025-07-07 20:31:22.091025046 -0400 EDT m=+0.087153210 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:22 managed-node1 podman[35906]: 2025-07-07 20:31:22.094799693 -0400 EDT m=+0.090928157 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:22 managed-node1 auth_test_1_kube-auth_test_1_kube[35931]: This container is intended for podman CI testing\nJul 07 20:31:22 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered 
the 'dead' state.\nJul 07 20:31:22 managed-node1 podman[35954]: 2025-07-07 20:31:22.139027999 -0400 EDT m=+0.028672465 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:22 managed-node1 podman[35954]: 2025-07-07 20:31:22.154020141 -0400 EDT m=+0.043664742 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:22 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5930.\nJul 07 20:31:22 managed-node1 podman[35954]: 2025-07-07 20:31:22.206647976 -0400 EDT m=+0.096292424 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:22 managed-node1 podman[35954]: 2025-07-07 20:31:22.209597636 -0400 EDT m=+0.099241976 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:22 managed-node1 auth_test_1_kube-auth_test_1_kube[36011]: This container is intended for podman CI testing\nJul 07 20:31:22 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:22 managed-node1 podman[36040]: 2025-07-07 20:31:22.259693095 -0400 
EDT m=+0.034188439 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:22 managed-node1 podman[36040]: 2025-07-07 20:31:22.269510422 -0400 EDT m=+0.044005342 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:22 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5937.\nJul 07 20:31:22 managed-node1 podman[36040]: 2025-07-07 20:31:22.329965247 -0400 EDT m=+0.104460206 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:22 managed-node1 auth_test_1_kube-auth_test_1_kube[36079]: This container is intended for podman CI testing\nJul 07 20:31:22 managed-node1 podman[36040]: 2025-07-07 20:31:22.334452532 -0400 EDT m=+0.108947421 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:22 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:22 managed-node1 conmon[36079]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: 
/sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events\nJul 07 20:31:22 managed-node1 podman[36083]: 2025-07-07 20:31:22.384871133 -0400 EDT m=+0.032627544 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:22 managed-node1 podman[36083]: 2025-07-07 20:31:22.401248506 -0400 EDT m=+0.049004881 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:22 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5944.\nJul 07 20:31:22 managed-node1 podman[36083]: 2025-07-07 20:31:22.521351163 -0400 EDT m=+0.169107630 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:22 managed-node1 podman[36083]: 2025-07-07 20:31:22.527989504 -0400 EDT m=+0.175745785 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:22 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' 
state.\nJul 07 20:31:22 managed-node1 auth_test_1_kube-auth_test_1_kube[36094]: This container is intended for podman CI testing\nJul 07 20:31:22 managed-node1 conmon[36094]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events\nJul 07 20:31:22 managed-node1 podman[36098]: 2025-07-07 20:31:22.583976206 -0400 EDT m=+0.037081633 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:22 managed-node1 python3.12[36073]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None\nJul 07 20:31:22 managed-node1 podman[36098]: 2025-07-07 20:31:22.59764438 -0400 EDT m=+0.050750034 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:22 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5951.\nJul 07 20:31:22 managed-node1 systemd[1]: Reload requested from client PID 36110 ('systemctl') (unit session-5.scope)...\nJul 07 20:31:22 managed-node1 systemd[1]: Reloading...\nJul 07 20:31:22 managed-node1 podman[36098]: 2025-07-07 20:31:22.659692815 -0400 EDT m=+0.112798231 container init 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:31:22 managed-node1 podman[36098]: 2025-07-07 20:31:22.666902581 -0400 EDT m=+0.120007940 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, 
pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:31:22 managed-node1 auth_test_1_kube-auth_test_1_kube[36112]: This container is intended for podman CI testing\nJul 07 20:31:22 managed-node1 podman[36134]: 2025-07-07 20:31:22.737682873 -0400 EDT m=+0.047915242 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:22 managed-node1 podman[36134]: 2025-07-07 20:31:22.752454858 -0400 EDT m=+0.062687129 container restart 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:22 managed-node1 systemd-rc-local-generator[36160]: /etc/rc.d/rc.local is not marked executable, skipping.\nJul 07 20:31:22 managed-node1 systemd[1]: Reloading finished in 243 ms.\nJul 07 20:31:22 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:22 managed-node1 systemd[1]: Stopping podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service - A template for running K8s workloads via podman-kube-play...\n\u2591\u2591 Subject: A stop job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5958.\nJul 07 20:31:22 managed-node1 systemd[1]: Started libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5959.\nJul 07 20:31:22 managed-node1 podman[36134]: 2025-07-07 20:31:22.993835809 -0400 EDT m=+0.304068216 container init 
88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:22 managed-node1 auth_test_1_kube-auth_test_1_kube[36185]: This container is intended for podman CI testing\nJul 07 20:31:23 managed-node1 systemd[1]: libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope has successfully entered the 'dead' state.\nJul 07 20:31:23 managed-node1 conmon[36185]: conmon 88fb929d50ddcb79a51c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice/libpod-88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e.scope/container/memory.events\nJul 07 20:31:23 managed-node1 podman[36134]: 2025-07-07 20:31:23.002373113 -0400 EDT m=+0.312605386 container start 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:31:23 managed-node1 podman[36183]: 2025-07-07 20:31:23.012615451 -0400 EDT m=+0.039513819 pod stop 8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd (image=, name=auth_test_1_kube)\nJul 07 20:31:23 managed-node1 podman[36183]: 2025-07-07 20:31:23.013929002 -0400 EDT m=+0.040827806 container died 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:23 managed-node1 systemd[1]: var-lib-containers-storage-overlay-343675d1c7962339170c5823c97d41383140939d7ff4380f29012cdee14c2784-merged.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay-343675d1c7962339170c5823c97d41383140939d7ff4380f29012cdee14c2784-merged.mount has successfully entered the 'dead' state.\nJul 07 20:31:23 managed-node1 podman[36183]: 2025-07-07 20:31:23.058780023 -0400 EDT m=+0.085678303 container cleanup 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, io.buildah.version=1.21.0, 
io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:31:23 managed-node1 systemd[1]: libpod-62f9d4997b7b62b31bc6de8fed554fd83031d2fc9306d738f5c6b7ca27523eba.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-62f9d4997b7b62b31bc6de8fed554fd83031d2fc9306d738f5c6b7ca27523eba.scope has successfully entered the 'dead' state.\nJul 07 20:31:23 managed-node1 podman[36183]: 2025-07-07 20:31:23.078759519 -0400 EDT m=+0.105657943 container died 62f9d4997b7b62b31bc6de8fed554fd83031d2fc9306d738f5c6b7ca27523eba (image=, name=8f7847ede084-infra, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:23 managed-node1 systemd[1]: run-p20191-i20491.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit run-p20191-i20491.scope has successfully entered the 'dead' state.\nJul 07 20:31:23 managed-node1 kernel: podman1: port 1(veth1) entered disabled state\nJul 07 20:31:23 managed-node1 kernel: veth1 (unregistering): left allmulticast mode\nJul 07 20:31:23 managed-node1 kernel: veth1 (unregistering): left promiscuous mode\nJul 07 20:31:23 managed-node1 kernel: podman1: port 1(veth1) entered disabled state\nJul 07 20:31:23 managed-node1 NetworkManager[716]: [1751934683.1218] device (podman1): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed')\nJul 07 20:31:23 managed-node1 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service...\n\u2591\u2591 Subject: A start job for unit NetworkManager-dispatcher.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit NetworkManager-dispatcher.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5966.\nJul 07 20:31:23 managed-node1 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service.\n\u2591\u2591 Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit NetworkManager-dispatcher.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5966.\nJul 07 20:31:23 managed-node1 systemd[1]: run-netns-netns\\x2d71306de9\\x2df283\\x2db61f\\x2d4a69\\x2db69d27093002.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit run-netns-netns\\x2d71306de9\\x2df283\\x2db61f\\x2d4a69\\x2db69d27093002.mount has successfully entered the 'dead' state.\nJul 07 20:31:23 managed-node1 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-62f9d4997b7b62b31bc6de8fed554fd83031d2fc9306d738f5c6b7ca27523eba-rootfs-merge.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: 
https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-62f9d4997b7b62b31bc6de8fed554fd83031d2fc9306d738f5c6b7ca27523eba-rootfs-merge.mount has successfully entered the 'dead' state.\nJul 07 20:31:23 managed-node1 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-62f9d4997b7b62b31bc6de8fed554fd83031d2fc9306d738f5c6b7ca27523eba-userdata-shm.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-62f9d4997b7b62b31bc6de8fed554fd83031d2fc9306d738f5c6b7ca27523eba-userdata-shm.mount has successfully entered the 'dead' state.\nJul 07 20:31:23 managed-node1 podman[36183]: 2025-07-07 20:31:23.183498632 -0400 EDT m=+0.210396942 container cleanup 62f9d4997b7b62b31bc6de8fed554fd83031d2fc9306d738f5c6b7ca27523eba (image=, name=8f7847ede084-infra, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:23 managed-node1 systemd[1]: Removed slice machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice - cgroup machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice.\n\u2591\u2591 Subject: A stop job for unit machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 6045 and the job result is done.\nJul 07 20:31:23 managed-node1 systemd[1]: machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice: Consumed 1.857s CPU time, 1M memory peak.\n\u2591\u2591 Subject: Resources consumed by unit runtime\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice completed and consumed the indicated resources.\nJul 07 20:31:23 managed-node1 podman[36183]: 2025-07-07 20:31:23.212685884 -0400 EDT m=+0.239584198 container remove 88fb929d50ddcb79a51c0e7cdfe908cd08b039a3d33b4f740fcdc4ffd44b6b1e (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:31:23 managed-node1 podman[36183]: 2025-07-07 20:31:23.238925424 -0400 EDT m=+0.265823745 container remove 62f9d4997b7b62b31bc6de8fed554fd83031d2fc9306d738f5c6b7ca27523eba (image=, name=8f7847ede084-infra, pod_id=8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:23 managed-node1 systemd[1]: machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice: Failed to open 
/run/systemd/transient/machine-libpod_pod_8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd.slice: No such file or directory\nJul 07 20:31:23 managed-node1 podman[36183]: 2025-07-07 20:31:23.24814718 -0400 EDT m=+0.275045457 pod remove 8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd (image=, name=auth_test_1_kube)\nJul 07 20:31:23 managed-node1 podman[36183]: 2025-07-07 20:31:23.251227715 -0400 EDT m=+0.278126263 container kill c378f913cea4ae57a9dcec0c838333f43982d213f895ef3f3c6af3dab5cfecc9 (image=, name=3ef6fcac6278-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:23 managed-node1 systemd[1]: libpod-c378f913cea4ae57a9dcec0c838333f43982d213f895ef3f3c6af3dab5cfecc9.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-c378f913cea4ae57a9dcec0c838333f43982d213f895ef3f3c6af3dab5cfecc9.scope has successfully entered the 'dead' state.\nJul 07 20:31:23 managed-node1 conmon[20171]: conmon c378f913cea4ae57a9dc : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/libpod-c378f913cea4ae57a9dcec0c838333f43982d213f895ef3f3c6af3dab5cfecc9.scope/container/memory.events\nJul 07 20:31:23 managed-node1 podman[36183]: 2025-07-07 20:31:23.259097255 -0400 EDT m=+0.285995664 container died c378f913cea4ae57a9dcec0c838333f43982d213f895ef3f3c6af3dab5cfecc9 (image=, name=3ef6fcac6278-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:23 managed-node1 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-c378f913cea4ae57a9dcec0c838333f43982d213f895ef3f3c6af3dab5cfecc9-rootfs-merge.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-c378f913cea4ae57a9dcec0c838333f43982d213f895ef3f3c6af3dab5cfecc9-rootfs-merge.mount has successfully entered the 'dead' state.\nJul 07 20:31:23 managed-node1 podman[36183]: 2025-07-07 20:31:23.3241034 -0400 EDT m=+0.351001711 container remove c378f913cea4ae57a9dcec0c838333f43982d213f895ef3f3c6af3dab5cfecc9 (image=, name=3ef6fcac6278-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:31:23 managed-node1 podman[36183]: Pods stopped:\nJul 07 20:31:23 managed-node1 podman[36183]: 8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd\nJul 07 20:31:23 managed-node1 podman[36183]: Pods removed:\nJul 07 20:31:23 managed-node1 podman[36183]: 8f7847ede0845ba435432949858e8bd20179834bb7c5b2379438724e7b9222cd\nJul 07 20:31:23 managed-node1 podman[36183]: Secrets removed:\nJul 07 20:31:23 managed-node1 podman[36183]: Volumes removed:\nJul 07 20:31:23 managed-node1 systemd[1]: podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service has successfully entered the 'dead' state.\nJul 07 20:31:23 managed-node1 systemd[1]: Stopped 
podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service - A template for running K8s workloads via podman-kube-play.\n\u2591\u2591 Subject: A stop job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5958 and the job result is done.\nJul 07 20:31:23 managed-node1 systemd[1]: podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service: Consumed 33.557s CPU time, 37M memory peak.\n\u2591\u2591 Subject: Resources consumed by unit runtime\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service completed and consumed the indicated resources.\nJul 07 20:31:23 managed-node1 python3.12[36402]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:31:24 managed-node1 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-c378f913cea4ae57a9dcec0c838333f43982d213f895ef3f3c6af3dab5cfecc9-userdata-shm.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-c378f913cea4ae57a9dcec0c838333f43982d213f895ef3f3c6af3dab5cfecc9-userdata-shm.mount has successfully entered the 'dead' state.\nJul 07 20:31:24 managed-node1 python3.12[36559]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 07 20:31:24 managed-node1 python3.12[36559]: ansible-containers.podman.podman_play version: 5.5.1, kube file /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml\nJul 07 20:31:24 managed-node1 python3.12[36727]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:31:25 managed-node1 python3.12[36882]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:31:27 managed-node1 python3.12[37039]: ansible-systemd Invoked with name=auth_test_1_quadlet.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None\nJul 07 20:31:27 managed-node1 systemd[1]: 
Reload requested from client PID 37042 ('systemctl') (unit session-5.scope)...\nJul 07 20:31:27 managed-node1 systemd[1]: Reloading...\nJul 07 20:31:27 managed-node1 systemd-rc-local-generator[37081]: /etc/rc.d/rc.local is not marked executable, skipping.\nJul 07 20:31:27 managed-node1 systemd[1]: Reloading finished in 193 ms.\nJul 07 20:31:27 managed-node1 python3.12[37252]: ansible-stat Invoked with path=/etc/containers/systemd/auth_test_1_quadlet.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:31:28 managed-node1 python3.12[37564]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:31:30 managed-node1 python3.12[37875]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:31:31 managed-node1 python3.12[38036]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:31:33 managed-node1 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.\nJul 07 20:31:34 managed-node1 python3.12[38194]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:31:35 managed-node1 python3.12[38351]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:31:37 managed-node1 python3.12[38508]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:31:38 managed-node1 python3.12[38665]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:31:38 managed-node1 python3.12[38821]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None\nJul 07 20:31:39 managed-node1 python3.12[38978]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:31:39 managed-node1 python3.12[39133]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None 
setype=None attributes=None\nJul 07 20:31:40 managed-node1 python3.12[39288]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:31:41 managed-node1 python3.12[39445]: ansible-systemd Invoked with name=auth_test_1_quadlet.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None\nJul 07 20:31:41 managed-node1 systemd[1]: Reload requested from client PID 39448 ('systemctl') (unit session-5.scope)...\nJul 07 20:31:41 managed-node1 systemd[1]: Reloading...\nJul 07 20:31:42 managed-node1 systemd-rc-local-generator[39493]: /etc/rc.d/rc.local is not marked executable, skipping.\nJul 07 20:31:42 managed-node1 systemd[1]: Reloading finished in 192 ms.\nJul 07 20:31:42 managed-node1 python3.12[39657]: ansible-stat Invoked with path=/etc/containers/systemd/auth_test_1_quadlet.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:31:43 managed-node1 python3.12[39969]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:31:45 managed-node1 python3.12[40280]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:31:46 managed-node1 python3.12[40441]: ansible-getent Invoked with database=passwd key=auth_test_user1 fail_key=False service=None split=None\nJul 07 20:31:46 managed-node1 python3.12[40597]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:31:47 managed-node1 python3.12[40753]: ansible-user Invoked with name=auth_test_user1 state=absent non_unique=False force=False remove=False create_home=True system=False move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node1 update_password=always uid=None group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None\nJul 07 20:31:47 managed-node1 python3.12[40909]: ansible-file Invoked with path=/home/auth_test_user1 state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:31:48 managed-node1 python3.12[41064]: ansible-ansible.legacy.command Invoked with _raw_params=podman inspect podman_registry --format '{{range .}}{{range .Mounts}}{{if eq .Type \"volume\"}}{{.Name}}{{end}}{{end}}{{end}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None 
executable=None creates=None removes=None stdin=None\nJul 07 20:31:48 managed-node1 python3.12[41227]: ansible-ansible.legacy.command Invoked with _raw_params=podman rm -f podman_registry _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:31:48 managed-node1 systemd[1]: libpod-cd37f7eebf3f1a0d1061dbea9630cc5859593ef5dfdb627a7bec92189b0c5ff0.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-cd37f7eebf3f1a0d1061dbea9630cc5859593ef5dfdb627a7bec92189b0c5ff0.scope has successfully entered the 'dead' state.\nJul 07 20:31:48 managed-node1 podman[41228]: 2025-07-07 20:31:48.594627818 -0400 EDT m=+0.040810779 container died cd37f7eebf3f1a0d1061dbea9630cc5859593ef5dfdb627a7bec92189b0c5ff0 (image=quay.io/libpod/registry:2.8.2, name=podman_registry)\nJul 07 20:31:48 managed-node1 kernel: podman0: port 1(veth0) entered disabled state\nJul 07 20:31:48 managed-node1 kernel: veth0 (unregistering): left allmulticast mode\nJul 07 20:31:48 managed-node1 kernel: veth0 (unregistering): left promiscuous mode\nJul 07 20:31:48 managed-node1 kernel: podman0: port 1(veth0) entered disabled state\nJul 07 20:31:48 managed-node1 NetworkManager[716]: [1751934708.6321] device (podman0): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed')\nJul 07 20:31:48 managed-node1 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service...\n\u2591\u2591 Subject: A start job for unit NetworkManager-dispatcher.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit NetworkManager-dispatcher.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 6046.\nJul 07 20:31:48 managed-node1 systemd[1]: run-netns-netns\\x2d54766868\\x2d56df\\x2d359a\\x2d3bba\\x2d291deccab973.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit run-netns-netns\\x2d54766868\\x2d56df\\x2d359a\\x2d3bba\\x2d291deccab973.mount has successfully entered the 'dead' state.\nJul 07 20:31:48 managed-node1 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service.\n\u2591\u2591 Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit NetworkManager-dispatcher.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 6046.\nJul 07 20:31:48 managed-node1 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-cd37f7eebf3f1a0d1061dbea9630cc5859593ef5dfdb627a7bec92189b0c5ff0-userdata-shm.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-cd37f7eebf3f1a0d1061dbea9630cc5859593ef5dfdb627a7bec92189b0c5ff0-userdata-shm.mount has successfully entered the 'dead' state.\nJul 07 20:31:48 managed-node1 systemd[1]: 
var-lib-containers-storage-overlay-90a43350bb4f6c8f9132c8d0276de4bda61dad2186899fe1f5ee6b12d58cb721-merged.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay-90a43350bb4f6c8f9132c8d0276de4bda61dad2186899fe1f5ee6b12d58cb721-merged.mount has successfully entered the 'dead' state.\nJul 07 20:31:48 managed-node1 podman[41228]: 2025-07-07 20:31:48.718671537 -0400 EDT m=+0.164854466 container remove cd37f7eebf3f1a0d1061dbea9630cc5859593ef5dfdb627a7bec92189b0c5ff0 (image=quay.io/libpod/registry:2.8.2, name=podman_registry)\nJul 07 20:31:48 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:31:48 managed-node1 systemd[1]: libpod-conmon-cd37f7eebf3f1a0d1061dbea9630cc5859593ef5dfdb627a7bec92189b0c5ff0.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-conmon-cd37f7eebf3f1a0d1061dbea9630cc5859593ef5dfdb627a7bec92189b0c5ff0.scope has successfully entered the 'dead' state.\nJul 07 20:31:49 managed-node1 python3.12[41421]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume rm 4f496486c0bcf1b762b3273588256ef269fddf531cc488eaf050a62e6391a121 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:31:49 managed-node1 podman[41422]: 2025-07-07 20:31:49.188296836 -0400 EDT m=+0.024296713 volume remove 4f496486c0bcf1b762b3273588256ef269fddf531cc488eaf050a62e6391a121\nJul 07 20:31:49 managed-node1 python3.12[41584]: ansible-file Invoked with path=/tmp/lsr_urjcw20a_podman state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:31:51 managed-node1 python3.12[41790]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d\nJul 07 20:31:52 managed-node1 python3.12[41974]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:31:53 managed-node1 python3.12[42129]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:31:55 managed-node1 python3.12[42439]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:31:55 managed-node1 python3.12[42600]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 07 20:31:56 managed-node1 python3.12[42756]: ansible-stat Invoked with 
path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:31:58 managed-node1 python3.12[42914]: ansible-tempfile Invoked with state=directory prefix=lsr_podman_config_ suffix= path=None\nJul 07 20:31:58 managed-node1 python3.12[43069]: ansible-ansible.legacy.command Invoked with _raw_params=tar --ignore-failed-read -c -P -v -p -f /tmp/lsr_podman_config_acix87c_/backup.tar /etc/containers/containers.conf.d/50-systemroles.conf /etc/containers/registries.conf.d/50-systemroles.conf /etc/containers/storage.conf /etc/containers/policy.json _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:31:58 managed-node1 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.\nJul 07 20:31:59 managed-node1 python3.12[43226]: ansible-user Invoked with name=user1 state=present non_unique=False force=False remove=False create_home=True system=False move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node1 update_password=always uid=None group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None\nJul 07 20:31:59 managed-node1 useradd[43228]: new group: name=user1, GID=1000\nJul 07 20:31:59 managed-node1 useradd[43228]: new user: name=user1, UID=1000, GID=1000, home=/home/user1, shell=/bin/bash, from=/dev/pts/0\nJul 07 20:32:01 managed-node1 python3.12[43538]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:32:01 managed-node1 python3.12[43700]: ansible-getent Invoked with database=passwd key=user1 fail_key=False service=None split=None\nJul 07 20:32:02 managed-node1 python3.12[43857]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:32:02 managed-node1 python3.12[44014]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:32:03 managed-node1 python3.12[44170]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:32:04 managed-node1 python3.12[44326]: ansible-file Invoked with path=/home/user1/.config/containers/containers.conf.d state=directory owner=user1 group=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False 
_original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:32:04 managed-node1 python3.12[44481]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:32:04 managed-node1 python3.12[44606]: ansible-ansible.legacy.copy Invoked with dest=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf owner=user1 group=user1 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751934724.2204754-13137-245227831768024/.source.conf _original_basename=.90zqvsbs follow=False checksum=b1776092f2908d76e11fd6af87267469b2c17d5a backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:32:05 managed-node1 python3.12[44761]: ansible-file Invoked with path=/home/user1/.config/containers/registries.conf.d state=directory owner=user1 group=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:32:05 managed-node1 python3.12[44916]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:32:06 managed-node1 python3.12[45041]: ansible-ansible.legacy.copy Invoked with dest=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf owner=user1 group=user1 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751934725.5107205-13166-107729205740759/.source.conf _original_basename=.ceqnuqdb follow=False checksum=fde25488ce7040f1639af7bfc88ed125318cc0b0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:32:06 managed-node1 python3.12[45196]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:32:06 managed-node1 python3.12[45351]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/storage.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:32:07 managed-node1 python3.12[45476]: ansible-ansible.legacy.copy Invoked with dest=/home/user1/.config/containers/storage.conf owner=user1 group=user1 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751934726.699786-13195-91714043369387/.source.conf _original_basename=.31lup0x9 follow=False checksum=38f015f4780579bd388dd955b42916199fd7fe19 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None 
setype=None attributes=None\nJul 07 20:32:07 managed-node1 python3.12[45631]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:32:08 managed-node1 python3.12[45786]: ansible-stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:32:08 managed-node1 python3.12[45941]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:32:08 managed-node1 python3.12[46066]: ansible-ansible.legacy.copy Invoked with dest=/home/user1/.config/containers/policy.json owner=user1 group=user1 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751934728.243613-13238-43275417211631/.source.json _original_basename=.5gmxjq7z follow=False checksum=6746c079ad563b735fc39f73d4876654b80b0a0d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:32:09 managed-node1 python3.12[46221]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:32:10 managed-node1 python3.12[46378]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:32:10 managed-node1 python3.12[46534]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:32:11 managed-node1 python3.12[46690]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:32:12 managed-node1 python3.12[47125]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:32:13 managed-node1 python3.12[47282]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:32:13 managed-node1 python3.12[47438]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:32:14 managed-node1 python3.12[47594]: ansible-stat Invoked with 
path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:32:15 managed-node1 python3.12[47751]: ansible-stat Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:32:15 managed-node1 python3.12[47908]: ansible-stat Invoked with path=/home/user1/.config/containers/storage.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:32:16 managed-node1 python3.12[48065]: ansible-stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:32:16 managed-node1 python3.12[48222]: ansible-ansible.legacy.command Invoked with _raw_params=grep 'container_name_as_hostname[ ]*=[ ]*true' /home/user1/.config/containers/containers.conf.d/50-systemroles.conf _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:32:18 managed-node1 python3.12[48533]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:32:19 managed-node1 python3.12[48694]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:32:19 managed-node1 python3.12[48851]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:32:20 managed-node1 python3.12[49007]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:32:20 managed-node1 python3.12[49163]: ansible-file Invoked with path=/home/user1/.config/containers/containers.conf.d state=directory owner=user1 group=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:32:21 managed-node1 python3.12[49318]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:32:21 managed-node1 python3.12[49396]: ansible-ansible.legacy.file Invoked with owner=user1 group=user1 mode=0644 dest=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf _original_basename=.mja6pw9a recurse=False state=file path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None 
attributes=None\nJul 07 20:32:21 managed-node1 python3.12[49551]: ansible-file Invoked with path=/home/user1/.config/containers/registries.conf.d state=directory owner=user1 group=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:32:22 managed-node1 python3.12[49706]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:32:22 managed-node1 python3.12[49784]: ansible-ansible.legacy.file Invoked with owner=user1 group=user1 mode=0644 dest=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf _original_basename=.j2jbew9d recurse=False state=file path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:32:23 managed-node1 python3.12[49939]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:32:23 managed-node1 python3.12[50095]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/storage.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:32:23 managed-node1 python3.12[50173]: ansible-ansible.legacy.file Invoked with owner=user1 group=user1 mode=0644 dest=/home/user1/.config/containers/storage.conf _original_basename=.z_096_sp recurse=False state=file path=/home/user1/.config/containers/storage.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:32:24 managed-node1 python3.12[50328]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:32:24 managed-node1 python3.12[50483]: ansible-stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:32:25 managed-node1 python3.12[50640]: ansible-slurp Invoked with path=/home/user1/.config/containers/policy.json src=/home/user1/.config/containers/policy.json\nJul 07 20:32:25 managed-node1 python3.12[50795]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True 
checksum_algorithm=sha1\nJul 07 20:32:26 managed-node1 python3.12[50953]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:32:26 managed-node1 python3.12[51109]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:32:27 managed-node1 python3.12[51266]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:32:28 managed-node1 python3.12[51654]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:32:29 managed-node1 python3.12[51811]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:32:29 managed-node1 python3.12[51967]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:32:30 managed-node1 python3.12[52123]: ansible-stat Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:32:30 managed-node1 python3.12[52280]: ansible-stat Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:32:31 managed-node1 python3.12[52437]: ansible-stat Invoked with path=/home/user1/.config/containers/storage.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:32:31 managed-node1 python3.12[52594]: ansible-stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:32:33 managed-node1 python3.12[52906]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:32:34 managed-node1 python3.12[53067]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 07 20:32:34 managed-node1 python3.12[53223]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:32:35 managed-node1 python3.12[53380]: ansible-file Invoked with path=/etc/containers/containers.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True 
modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:32:36 managed-node1 python3.12[53535]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:32:36 managed-node1 python3.12[53660]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/containers.conf.d/50-systemroles.conf owner=root mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751934756.0927734-14109-6617328344651/.source.conf _original_basename=.e_8i3w3o follow=False checksum=b1776092f2908d76e11fd6af87267469b2c17d5a backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:32:37 managed-node1 python3.12[53815]: ansible-file Invoked with path=/etc/containers/registries.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:32:37 managed-node1 python3.12[53970]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:32:37 managed-node1 python3.12[54095]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/registries.conf.d/50-systemroles.conf owner=root mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751934757.322634-14160-134327254316543/.source.conf _original_basename=.yy1ryvyi follow=False checksum=fde25488ce7040f1639af7bfc88ed125318cc0b0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:32:38 managed-node1 python3.12[54250]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:32:38 managed-node1 python3.12[54405]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:32:39 managed-node1 python3.12[54530]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/storage.conf owner=root mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751934758.6517808-14205-147389732057554/.source.conf _original_basename=.q90e1n48 follow=False checksum=38f015f4780579bd388dd955b42916199fd7fe19 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None 
attributes=None\nJul 07 20:32:39 managed-node1 python3.12[54685]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:32:40 managed-node1 python3.12[54840]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:32:40 managed-node1 python3.12[54997]: ansible-slurp Invoked with path=/etc/containers/policy.json src=/etc/containers/policy.json\nJul 07 20:32:41 managed-node1 python3.12[55152]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:32:41 managed-node1 python3.12[55279]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/policy.json owner=root mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751934760.7385497-14276-15721655439210/.source.json _original_basename=.355w7cpi follow=False checksum=6746c079ad563b735fc39f73d4876654b80b0a0d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:32:42 managed-node1 python3.12[55434]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:32:43 managed-node1 python3.12[55591]: ansible-file Invoked with path=/root/.config/containers state=directory owner=root group=0 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:32:45 managed-node1 python3.12[56028]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:32:46 managed-node1 python3.12[56185]: ansible-stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:32:47 managed-node1 python3.12[56342]: ansible-stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:32:47 managed-node1 python3.12[56499]: ansible-stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:32:47 managed-node1 python3.12[56656]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:32:50 managed-node1 python3.12[56968]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:32:51 managed-node1 python3.12[57129]: 
ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:32:52 managed-node1 python3.12[57286]: ansible-file Invoked with path=/etc/containers/containers.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:32:52 managed-node1 python3.12[57441]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:32:52 managed-node1 python3.12[57519]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/containers.conf.d/50-systemroles.conf _original_basename=.4sbcez6l recurse=False state=file path=/etc/containers/containers.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:32:53 managed-node1 python3.12[57674]: ansible-file Invoked with path=/etc/containers/registries.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:32:53 managed-node1 python3.12[57829]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:32:54 managed-node1 python3.12[57907]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/registries.conf.d/50-systemroles.conf _original_basename=.z1r_2_b3 recurse=False state=file path=/etc/containers/registries.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:32:54 managed-node1 python3.12[58062]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:32:54 managed-node1 python3.12[58217]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:32:55 managed-node1 python3.12[58295]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/storage.conf _original_basename=.e7w1bw_a recurse=False state=file path=/etc/containers/storage.conf force=False follow=True 
modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:32:55 managed-node1 python3.12[58450]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:32:56 managed-node1 python3.12[58605]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:32:56 managed-node1 python3.12[58762]: ansible-slurp Invoked with path=/etc/containers/policy.json src=/etc/containers/policy.json\nJul 07 20:32:57 managed-node1 python3.12[58917]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:32:58 managed-node1 python3.12[59074]: ansible-file Invoked with path=/root/.config/containers state=directory owner=root group=0 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:33:00 managed-node1 python3.12[59462]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:33:01 managed-node1 python3.12[59619]: ansible-stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:33:01 managed-node1 python3.12[59776]: ansible-stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:33:02 managed-node1 python3.12[59933]: ansible-stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:33:02 managed-node1 python3.12[60090]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:33:03 managed-node1 python3.12[60247]: ansible-slurp Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf src=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf\nJul 07 20:33:03 managed-node1 python3.12[60402]: ansible-slurp Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf src=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf\nJul 07 20:33:04 managed-node1 python3.12[60557]: ansible-slurp Invoked with path=/home/user1/.config/containers/storage.conf src=/home/user1/.config/containers/storage.conf\nJul 07 20:33:04 managed-node1 python3.12[60712]: ansible-slurp Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf src=/etc/containers/containers.conf.d/50-systemroles.conf\nJul 07 20:33:05 managed-node1 python3.12[60867]: ansible-slurp Invoked with 
path=/etc/containers/registries.conf.d/50-systemroles.conf src=/etc/containers/registries.conf.d/50-systemroles.conf\nJul 07 20:33:05 managed-node1 python3.12[61022]: ansible-slurp Invoked with path=/etc/containers/storage.conf src=/etc/containers/storage.conf\nJul 07 20:33:08 managed-node1 python3.12[61332]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:33:09 managed-node1 python3.12[61493]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:33:10 managed-node1 python3.12[61650]: ansible-file Invoked with path=/etc/containers/containers.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:33:10 managed-node1 python3.12[61805]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:33:11 managed-node1 python3.12[61932]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/containers.conf.d/50-systemroles.conf owner=root mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751934790.4974134-15303-224512391964356/.source.conf _original_basename=.p1xuaqrk follow=False checksum=9694c1d1c700a6435eecf4066b052584f4ee94c0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:33:11 managed-node1 python3.12[62087]: ansible-file Invoked with path=/etc/containers/registries.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:33:11 managed-node1 python3.12[62243]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:33:12 managed-node1 python3.12[62321]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/registries.conf.d/50-systemroles.conf _original_basename=.6uhx9rju recurse=False state=file path=/etc/containers/registries.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:33:12 managed-node1 python3.12[62476]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None 
src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:33:13 managed-node1 python3.12[62631]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:33:13 managed-node1 python3.12[62709]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/storage.conf _original_basename=.n46t0ec7 recurse=False state=file path=/etc/containers/storage.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:33:13 managed-node1 python3.12[62864]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:33:14 managed-node1 python3.12[63019]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:33:14 managed-node1 python3.12[63176]: ansible-slurp Invoked with path=/etc/containers/policy.json src=/etc/containers/policy.json\nJul 07 20:33:15 managed-node1 python3.12[63331]: ansible-slurp Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf src=/etc/containers/containers.conf.d/50-systemroles.conf\nJul 07 20:33:16 managed-node1 python3.12[63487]: ansible-file Invoked with state=absent path=/etc/containers/containers.conf.d/50-systemroles.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:33:16 managed-node1 python3.12[63642]: ansible-file Invoked with state=absent path=/etc/containers/registries.conf.d/50-systemroles.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:33:17 managed-node1 python3.12[63797]: ansible-file Invoked with state=absent path=/etc/containers/storage.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:33:17 managed-node1 python3.12[63952]: ansible-file Invoked with state=absent path=/etc/containers/policy.json recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None 
seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:33:17 managed-node1 python3.12[64108]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:33:18 managed-node1 python3.12[64263]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:33:18 managed-node1 python3.12[64418]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/storage.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:33:18 managed-node1 python3.12[64573]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/policy.json recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:33:19 managed-node1 python3.12[64728]: ansible-file Invoked with state=absent path=/root/.config/containers/auth.json recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:33:19 managed-node1 python3.12[64883]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/auth.json recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:33:19 managed-node1 python3.12[65038]: ansible-ansible.legacy.command Invoked with _raw_params=tar xfvpP /tmp/lsr_podman_config_acix87c_/backup.tar _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:33:20 managed-node1 python3.12[65194]: ansible-file Invoked with state=absent path=/tmp/lsr_podman_config_acix87c_ recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None 
selevel=None setype=None attributes=None\nJul 07 20:33:22 managed-node1 python3.12[65400]: ansible-setup Invoked with gather_subset=['!all', '!min', 'distribution', 'distribution_major_version', 'distribution_version', 'os_family'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d\nJul 07 20:33:22 managed-node1 python3.12[65557]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:33:22 managed-node1 python3.12[65712]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:33:24 managed-node1 python3.12[66022]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:33:25 managed-node1 python3.12[66184]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 07 20:33:26 managed-node1 python3.12[66340]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:33:29 managed-node1 python3.12[66548]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d\nJul 07 20:33:32 managed-node1 python3.12[66732]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:33:33 managed-node1 python3.12[66887]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:33:35 managed-node1 python3.12[67197]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:33:35 managed-node1 python3.12[67359]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 07 20:33:36 managed-node1 python3.12[67515]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:33:40 managed-node1 python3.12[67723]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d\nJul 07 20:33:41 managed-node1 python3.12[67907]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:33:41 managed-node1 python3.12[68062]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:33:43 managed-node1 python3.12[68372]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:33:44 managed-node1 python3.12[68533]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 07 20:33:44 managed-node1 python3.12[68689]: ansible-stat Invoked with 
path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:33:46 managed-node1 python3.12[68846]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:33:48 managed-node1 python3.12[69003]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:33:48 managed-node1 python3.12[69158]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/nopull.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:33:49 managed-node1 python3.12[69283]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1751934828.656423-17050-224685263014320/.source.container dest=/etc/containers/systemd/nopull.container owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=670d64fc68a9768edb20cad26df2acc703542d85 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:33:51 managed-node1 python3.12[69593]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:33:52 managed-node1 python3.12[69754]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:33:54 managed-node1 python3.12[69911]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:33:55 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:33:55 managed-node1 podman[70077]: 2025-07-07 20:33:55.866838654 -0400 EDT m=+0.019750887 image pull-error this_is_a_bogus_image:latest short-name resolution enforced but cannot prompt without a TTY\nJul 07 20:33:55 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:33:56 managed-node1 python3.12[70238]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None 
attributes=None\nJul 07 20:33:56 managed-node1 python3.12[70393]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/bogus.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:33:57 managed-node1 python3.12[70518]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1751934836.4283967-17279-9425111643274/.source.container dest=/etc/containers/systemd/bogus.container owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=1d087e679d135214e8ac9ccaf33b2222916efb7f backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:33:59 managed-node1 python3.12[70828]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:34:00 managed-node1 python3.12[70990]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:34:02 managed-node1 python3.12[71147]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:34:03 managed-node1 python3.12[71304]: ansible-systemd Invoked with name=nopull.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None\nJul 07 20:34:04 managed-node1 python3.12[71460]: ansible-stat Invoked with path=/etc/containers/systemd/nopull.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:34:05 managed-node1 python3.12[71772]: ansible-ansible.legacy.command Invoked with _raw_params=set -x\n set -o pipefail\n exec 1>&2\n #podman volume rm --all\n #podman network prune -f\n podman volume ls\n podman network ls\n podman secret ls\n podman container ls\n podman pod ls\n podman images\n systemctl list-units | grep quadlet\n _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:34:05 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:34:05 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:34:06 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:34:07 managed-node1 python3.12[72130]: 
ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:34:08 managed-node1 python3.12[72291]: ansible-getent Invoked with database=passwd key=user_quadlet_basic fail_key=False service=None split=None\nJul 07 20:34:08 managed-node1 python3.12[72447]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None", "task_name": "Dump journal", "task_path": "/tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:336" } ] SYSTEM ROLES ERRORS END v1 TASKS RECAP ******************************************************************** Monday 07 July 2025 20:34:08 -0400 (0:00:00.438) 0:00:29.111 *********** =============================================================================== Gathering Facts --------------------------------------------------------- 1.14s /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:9 fedora.linux_system_roles.podman : Gather the package facts ------------- 1.12s /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 fedora.linux_system_roles.podman : Gather the package facts ------------- 0.96s /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 fedora.linux_system_roles.podman : Gather the package facts ------------- 0.94s /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 fedora.linux_system_roles.podman : Gather the package facts ------------- 0.94s /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 fedora.linux_system_roles.podman : Ensure quadlet file is present ------- 0.84s /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:75 fedora.linux_system_roles.podman : Stop and disable service ------------- 0.78s /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 fedora.linux_system_roles.podman : Ensure quadlet file is present ------- 0.70s /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:75 fedora.linux_system_roles.podman : Ensure the quadlet directory is present --- 0.59s /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 fedora.linux_system_roles.podman : Ensure container images are present --- 0.58s /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Debug3 ------------------------------------------------------------------ 0.55s /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:270 fedora.linux_system_roles.podman : Slurp quadlet file ------------------- 0.52s /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 fedora.linux_system_roles.podman : Get podman version ------------------- 0.48s /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 
fedora.linux_system_roles.podman : Get user information ----------------- 0.46s /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Dump journal ------------------------------------------------------------ 0.44s /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:336 fedora.linux_system_roles.podman : See if quadlet file exists ----------- 0.43s /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:34 fedora.linux_system_roles.podman : Check if system is ostree ------------ 0.43s /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 fedora.linux_system_roles.podman : Get podman version ------------------- 0.39s /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 fedora.linux_system_roles.podman : Get podman version ------------------- 0.39s /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 fedora.linux_system_roles.podman : See if getsubids exists -------------- 0.39s /tmp/collections-hA1/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31